@@ -0,0 +1,82 @@ | |||
import logging | |||
import traceback | |||
from django.conf import settings | |||
from django.contrib.auth.hashers import check_password | |||
from django.contrib.auth.models import User | |||
from ldap3 import Server, Connection, ALL, NTLM, ALL_ATTRIBUTES | |||
from ldap3.core.exceptions import LDAPSocketOpenError
from ldap3.utils.conv import escape_filter_chars
import mysite.settings
class LdapBackend(object): | |||
""" | |||
Custom Django authentication backend that authenticates users against an LDAP directory via NTLM bind.
""" | |||
log = logging.getLogger('mysite') | |||
def check_login(self, username, password): | |||
server = Server(mysite.settings.LDAP_SERVER, connect_timeout=8) | |||
qualified_user = mysite.settings.LDAP_DOMAIN + '\\' + username | |||
conn = Connection(server, qualified_user, password=password, authentication=NTLM) | |||
try: | |||
conn.bind() | |||
except LDAPSocketOpenError: | |||
# LDAP server not reachable
self.log.info("LDAP check_login: Server not reachable.") | |||
return None | |||
except Exception:
var = traceback.format_exc()
self.log.info("LDAP check_login(bind): unexpected error: %s", var)
return None
result = None | |||
try: | |||
if conn.extend.standard.who_am_i() is not None:
conn.search( | |||
search_base='DC=' + mysite.settings.LDAP_DOMAIN + ',DC=fh-nuernberg,DC=de',
# escape user-supplied input before embedding it in the LDAP filter
search_filter='(&(objectclass=user)(CN=' + escape_filter_chars(username) + '))', attributes=ALL_ATTRIBUTES)
info = conn.entries[0] | |||
result = {'lastname' : str(info.sn), | |||
'givenname' : str(info.givenName), | |||
'login' : str(info.cn), | |||
'department' : str(info.department), | |||
'role' : str(info.description)} | |||
self.log.info("LDAP check_login: %s" % result) | |||
except: | |||
var = traceback.format_exc() | |||
self.log.info("LDAP check_login: Unexpected Error %s" % var) | |||
conn.unbind() | |||
return result | |||
def authenticate(self, request, username=None, password=None): | |||
ldap_user = self.check_login(username,password) | |||
if ldap_user: | |||
# {'lastname': 'Hofmann', 'givenname': 'Oliver', 'login': 'hofmannol', 'department': 'EFI', 'role': 'PF'} | |||
# {'lastname': 'Wimmer', 'givenname': 'Martin', 'login': 'wimmerma', 'department': 'EFI', 'role': 'MA'} | |||
# {'lastname': 'Mueller', 'givenname': 'Vincent', 'login': 'muellervi56608', 'department': 'EFI', 'role': 'ST'} | |||
# {'lastname': 'Poehlau', 'givenname': 'Frank', 'login': 'poehlaufr', 'department': 'EFI', 'role': 'PF'} | |||
try: | |||
user = User.objects.get(username=ldap_user['login']) | |||
except User.DoesNotExist: | |||
self.log.info("LDAP authenticate: create new user %s" % ldap_user['login']) | |||
user = User(username=ldap_user['login']) | |||
user.first_name = ldap_user['givenname'] | |||
user.last_name = ldap_user['lastname'] | |||
user.is_staff = (ldap_user['role'] != 'ST') | |||
user.is_superuser = False | |||
user.save() | |||
return user | |||
return None | |||
def get_user(self, user_id): | |||
try: | |||
return User.objects.get(pk=user_id) | |||
except User.DoesNotExist: | |||
return None | |||
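For context, the backend above reads two values from the project settings module (`mysite.settings`). A minimal sketch of those settings, with placeholder values, assuming the fh-nuernberg search base hard-coded in check_login():

# settings.py -- minimal sketch; host and domain below are placeholders
LDAP_SERVER = 'ldap.example.fh-nuernberg.de'  # hypothetical host passed to ldap3.Server()
LDAP_DOMAIN = 'EXAMPLE'                       # NTLM domain prefix and first DC= of the search base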
@@ -164,6 +164,6 @@ if DEVELOPMENT: | |||
else: | |||
AUTHENTICATION_BACKENDS = [ | |||
'django.contrib.auth.backends.ModelBackend', | |||
'medinf.ldap_backend.LdapBackend', | |||
'mysite.ldap_backend.LdapBackend', | |||
] | |||
print(" --- Live stage --- ") |
@@ -15,16 +15,16 @@ Including another URLconf | |||
""" | |||
from django.contrib import admin | |||
from django.conf.urls import include, url | |||
from django.views.generic import TemplateView | |||
from django.contrib.auth import views | |||
import application.views | |||
urlpatterns = [ | |||
url(r'^$', TemplateView.as_view(template_name="index.html")), | |||
url(r'^admin/', admin.site.urls), | |||
url(r'^navlogin/', application.views.navlogin, name='navlogin'), | |||
url(r'^accounts/login/$', views.login, name='login'), | |||
url(r'^accounts/logout/$', views.logout, | |||
name='logout', kwargs={'next_page': '/'}), | |||
url(r'', include('application.urls')), | |||
] | |||
url(r'^accounts/', include('django.contrib.auth.urls')), | |||
] |
@@ -0,0 +1,12 @@ | |||
#!/Users/Esthi/thesis_ek/thesisenv/bin/python | |||
# EASY-INSTALL-ENTRY-SCRIPT: 'autopep8==1.3.5','console_scripts','autopep8' | |||
__requires__ = 'autopep8==1.3.5' | |||
import re | |||
import sys | |||
from pkg_resources import load_entry_point | |||
if __name__ == '__main__': | |||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) | |||
sys.exit( | |||
load_entry_point('autopep8==1.3.5', 'console_scripts', 'autopep8')() | |||
) |
@@ -0,0 +1,11 @@ | |||
#!/Users/Esthi/thesis_ek/thesisenv/bin/python | |||
# -*- coding: utf-8 -*- | |||
import re | |||
import sys | |||
from pycodestyle import _main | |||
if __name__ == '__main__': | |||
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0]) | |||
sys.exit(_main()) |
@@ -0,0 +1,385 @@ | |||
Metadata-Version: 1.1 | |||
Name: autopep8 | |||
Version: 1.3.5 | |||
Summary: A tool that automatically formats Python code to conform to the PEP 8 style guide | |||
Home-page: https://github.com/hhatto/autopep8 | |||
Author: Hideo Hattori | |||
Author-email: hhatto.jp@gmail.com | |||
License: Expat License | |||
Description: ======== | |||
autopep8 | |||
======== | |||
.. image:: https://img.shields.io/pypi/v/autopep8.svg | |||
:target: https://pypi.python.org/pypi/autopep8 | |||
:alt: PyPI Version | |||
.. image:: https://travis-ci.org/hhatto/autopep8.svg?branch=master | |||
:target: https://travis-ci.org/hhatto/autopep8 | |||
:alt: Build status | |||
autopep8 automatically formats Python code to conform to the `PEP 8`_ style | |||
guide. It uses the pycodestyle_ utility to determine what parts of the code | |||
needs to be formatted. autopep8 is capable of fixing most of the formatting | |||
issues_ that can be reported by pycodestyle. | |||
.. _PEP 8: https://www.python.org/dev/peps/pep-0008/ | |||
.. _issues: https://pycodestyle.readthedocs.org/en/latest/intro.html#error-codes | |||
.. contents:: | |||
Installation | |||
============ | |||
From pip:: | |||
$ pip install --upgrade autopep8 | |||
Consider using the ``--user`` option_. | |||
.. _option: https://pip.pypa.io/en/latest/user_guide/#user-installs | |||
Requirements | |||
============ | |||
autopep8 requires pycodestyle_. | |||
.. _pycodestyle: https://github.com/PyCQA/pycodestyle | |||
Usage | |||
===== | |||
To modify a file in place (with aggressive level 2):: | |||
$ autopep8 --in-place --aggressive --aggressive <filename> | |||
Before running autopep8. | |||
.. code-block:: python | |||
import math, sys; | |||
def example1(): | |||
####This is a long comment. This should be wrapped to fit within 72 characters. | |||
some_tuple=( 1,2, 3,'a' ); | |||
some_variable={'long':'Long code lines should be wrapped within 79 characters.', | |||
'other':[math.pi, 100,200,300,9876543210,'This is a long string that goes on'], | |||
'more':{'inner':'This whole logical line should be wrapped.',some_tuple:[1, | |||
20,300,40000,500000000,60000000000000000]}} | |||
return (some_tuple, some_variable) | |||
def example2(): return {'has_key() is deprecated':True}.has_key({'f':2}.has_key('')); | |||
class Example3( object ): | |||
def __init__ ( self, bar ): | |||
#Comments should have a space after the hash. | |||
if bar : bar+=1; bar=bar* bar ; return bar | |||
else: | |||
some_string = """ | |||
Indentation in multiline strings should not be touched. | |||
Only actual code should be reindented. | |||
""" | |||
return (sys.path, some_string) | |||
After running autopep8. | |||
.. code-block:: python | |||
import math | |||
import sys | |||
def example1(): | |||
# This is a long comment. This should be wrapped to fit within 72 | |||
# characters. | |||
some_tuple = (1, 2, 3, 'a') | |||
some_variable = { | |||
'long': 'Long code lines should be wrapped within 79 characters.', | |||
'other': [ | |||
math.pi, | |||
100, | |||
200, | |||
300, | |||
9876543210, | |||
'This is a long string that goes on'], | |||
'more': { | |||
'inner': 'This whole logical line should be wrapped.', | |||
some_tuple: [ | |||
1, | |||
20, | |||
300, | |||
40000, | |||
500000000, | |||
60000000000000000]}} | |||
return (some_tuple, some_variable) | |||
def example2(): return ('' in {'f': 2}) in {'has_key() is deprecated': True} | |||
class Example3(object): | |||
def __init__(self, bar): | |||
# Comments should have a space after the hash. | |||
if bar: | |||
bar += 1 | |||
bar = bar * bar | |||
return bar | |||
else: | |||
some_string = """ | |||
Indentation in multiline strings should not be touched. | |||
Only actual code should be reindented. | |||
""" | |||
return (sys.path, some_string) | |||
Options:: | |||
usage: autopep8 [-h] [--version] [-v] [-d] [-i] [--global-config filename] | |||
[--ignore-local-config] [-r] [-j n] [-p n] [-a] | |||
[--experimental] [--exclude globs] [--list-fixes] | |||
[--ignore errors] [--select errors] [--max-line-length n] | |||
[--line-range line line] | |||
[files [files ...]] | |||
Automatically formats Python code to conform to the PEP 8 style guide. | |||
positional arguments: | |||
files files to format or '-' for standard in | |||
optional arguments: | |||
-h, --help show this help message and exit | |||
--version show program's version number and exit | |||
-v, --verbose print verbose messages; multiple -v result in more | |||
verbose messages | |||
-d, --diff print the diff for the fixed source | |||
-i, --in-place make changes to files in place | |||
--global-config filename | |||
path to a global pep8 config file; if this file does | |||
not exist then this is ignored (default: | |||
~/.config/pep8) | |||
--ignore-local-config | |||
don't look for and apply local config files; if not | |||
passed, defaults are updated with any config files in | |||
the project's root directory | |||
-r, --recursive run recursively over directories; must be used with | |||
--in-place or --diff | |||
-j n, --jobs n number of parallel jobs; match CPU count if value is | |||
less than 1 | |||
-p n, --pep8-passes n | |||
maximum number of additional pep8 passes (default: | |||
infinite) | |||
-a, --aggressive enable non-whitespace changes; multiple -a result in | |||
more aggressive changes | |||
--experimental enable experimental fixes | |||
--exclude globs exclude file/directory names that match these comma- | |||
separated globs | |||
--list-fixes list codes for fixes; used by --ignore and --select | |||
--ignore errors do not fix these errors/warnings (default: E24) | |||
--select errors fix only these errors/warnings (e.g. E4,W) | |||
--max-line-length n set maximum allowed line length (default: 79) | |||
--line-range line line, --range line line | |||
only fix errors found within this inclusive range of | |||
line numbers (e.g. 1 99); line numbers are indexed at | |||
1 | |||
Features | |||
======== | |||
autopep8 fixes the following issues_ reported by pycodestyle_:: | |||
E101 - Reindent all lines. | |||
E11 - Fix indentation (does not include E112 and E113).
E121 - Fix indentation to be a multiple of four. | |||
E122 - Add absent indentation for hanging indentation. | |||
E123 - Align closing bracket to match opening bracket. | |||
E124 - Align closing bracket to match visual indentation. | |||
E125 - Indent to distinguish line from next logical line. | |||
E126 - Fix over-indented hanging indentation. | |||
E127 - Fix visual indentation. | |||
E128 - Fix visual indentation. | |||
E20 - Remove extraneous whitespace. | |||
E211 - Remove extraneous whitespace. | |||
E22 - Fix extraneous whitespace around keywords. | |||
E224 - Remove extraneous whitespace around operator. | |||
E226 - Fix missing whitespace around arithmetic operator. | |||
E227 - Fix missing whitespace around bitwise/shift operator. | |||
E228 - Fix missing whitespace around modulo operator. | |||
E231 - Add missing whitespace. | |||
E241 - Fix extraneous whitespace around keywords. | |||
E242 - Remove extraneous whitespace around operator. | |||
E251 - Remove whitespace around parameter '=' sign. | |||
E26 - Fix spacing after comment hash for inline comments. | |||
E265 - Fix spacing after comment hash for block comments. | |||
E27 - Fix extraneous whitespace around keywords. | |||
E301 - Add missing blank line. | |||
E302 - Add missing 2 blank lines. | |||
E303 - Remove extra blank lines. | |||
E304 - Remove blank line following function decorator. | |||
E306 - Expected 1 blank line before a nested definition | |||
E401 - Put imports on separate lines. | |||
E501 - Try to make lines fit within --max-line-length characters. | |||
E502 - Remove extraneous escape of newline. | |||
E701 - Put colon-separated compound statement on separate lines. | |||
E70 - Put semicolon-separated compound statement on separate lines. | |||
E711 - Fix comparison with None. | |||
E712 - Fix comparison with boolean. | |||
E721 - Use "isinstance()" instead of comparing types directly. | |||
E722 - Fix bare except. | |||
W291 - Remove trailing whitespace. | |||
W292 - Add a single newline at the end of the file. | |||
W293 - Remove trailing whitespace on blank line. | |||
W391 - Remove trailing blank lines. | |||
W601 - Use "in" rather than "has_key()". | |||
W602 - Fix deprecated form of raising exception. | |||
W603 - Use "!=" instead of "<>" | |||
W604 - Use "repr()" instead of backticks. | |||
W690 - Fix various deprecated code (via lib2to3). | |||
autopep8 also fixes some issues not found by pycodestyle_. | |||
- Correct deprecated or non-idiomatic Python code (via ``lib2to3``). Use this | |||
for making Python 2.7 code more compatible with Python 3. (This is triggered | |||
if ``W690`` is enabled.) | |||
- Normalize files with mixed line endings. | |||
- Put a blank line between a class docstring and its first method | |||
declaration. (Enabled with ``E301``.) | |||
- Remove blank lines between a function declaration and its docstring. (Enabled | |||
with ``E303``.) | |||
autopep8 avoids fixing some issues found by pycodestyle_. | |||
- ``E112``/``E113`` for non comments are reports of bad indentation that break | |||
syntax rules. These should not be modified at all. | |||
- ``E265``, which refers to spacing after comment hash, is ignored if the | |||
comment looks like code. autopep8 avoids modifying these since they are not | |||
real comments. If you really want to get rid of the pycodestyle_ warning, | |||
consider just removing the commented-out code. (This can be automated via | |||
eradicate_.) | |||
.. _eradicate: https://github.com/myint/eradicate | |||
More advanced usage | |||
=================== | |||
By default autopep8 only makes whitespace changes. Thus, by default, it does | |||
not fix ``E711`` and ``E712``. (Changing ``x == None`` to ``x is None`` may | |||
change the meaning of the program if ``x`` has its ``__eq__`` method | |||
overridden.) Nor does it correct deprecated code ``W6``. To enable these | |||
more aggressive fixes, use the ``--aggressive`` option:: | |||
$ autopep8 --aggressive <filename> | |||
Use multiple ``--aggressive`` to increase the aggressiveness level. For | |||
example, ``E712`` requires aggressiveness level 2 (since ``x == True`` could be | |||
changed to either ``x`` or ``x is True``, but autopep8 chooses the former). | |||
``--aggressive`` will also shorten lines more aggressively. It will also remove | |||
trailing whitespace more aggressively. (Usually, we don't touch trailing | |||
whitespace in docstrings and other multiline strings. And to do even more | |||
aggressive changes to docstrings, use docformatter_.) | |||
.. _docformatter: https://github.com/myint/docformatter | |||
To enable only a subset of the fixes, use the ``--select`` option. For example, | |||
to fix various types of indentation issues:: | |||
$ autopep8 --select=E1,W1 <filename> | |||
Similarly, to just fix deprecated code:: | |||
$ autopep8 --aggressive --select=W6 <filename> | |||
The above is useful when trying to port a single code base to work with both | |||
Python 2 and Python 3 at the same time. | |||
If the file being fixed is large, you may want to enable verbose progress | |||
messages:: | |||
$ autopep8 -v <filename> | |||
Use as a module | |||
=============== | |||
The simplest way of using autopep8 as a module is via the ``fix_code()`` | |||
function: | |||
>>> import autopep8 | |||
>>> autopep8.fix_code('x= 123\n') | |||
'x = 123\n' | |||
Or with options: | |||
>>> import autopep8 | |||
>>> autopep8.fix_code('x.has_key(y)\n', | |||
... options={'aggressive': 1}) | |||
'y in x\n' | |||
>>> autopep8.fix_code('print( 123 )\n', | |||
... options={'ignore': ['E']}) | |||
'print( 123 )\n' | |||
Testing | |||
======= | |||
Test cases are in ``test/test_autopep8.py``. They can be run directly via | |||
``python test/test_autopep8.py`` or via tox_. The latter is useful for | |||
testing against multiple Python interpreters. (We currently test against | |||
CPython versions 2.7, 3.4, 3.5 and 3.6. We also test against PyPy.) | |||
.. _`tox`: https://pypi.python.org/pypi/tox | |||
Broad spectrum testing is available via ``test/acid.py``. This script runs | |||
autopep8 against Python code and checks for correctness and completeness of the | |||
code fixes. It can check that the bytecode remains identical. | |||
``test/acid_pypi.py`` makes use of ``acid.py`` to test against the latest | |||
released packages on PyPI. | |||
Troubleshooting | |||
=============== | |||
``pkg_resources.DistributionNotFound`` | |||
-------------------------------------- | |||
If you are using an ancient version of ``setuptools``, you might encounter | |||
``pkg_resources.DistributionNotFound`` when trying to run ``autopep8``. Try | |||
upgrading ``setuptools`` to work around this ``setuptools`` problem::
$ pip install --upgrade setuptools | |||
Use ``sudo`` if you are installing to the system. | |||
Links | |||
===== | |||
* PyPI_ | |||
* GitHub_ | |||
* `Travis CI`_ | |||
* Coveralls_ | |||
.. _PyPI: https://pypi.python.org/pypi/autopep8/ | |||
.. _GitHub: https://github.com/hhatto/autopep8 | |||
.. _`Travis CI`: https://travis-ci.org/hhatto/autopep8 | |||
.. _`Coveralls`: https://coveralls.io/r/hhatto/autopep8 | |||
Keywords: automation,pep8,format,pycodestyle | |||
Platform: UNKNOWN | |||
Classifier: Development Status :: 5 - Production/Stable | |||
Classifier: Environment :: Console | |||
Classifier: Intended Audience :: Developers | |||
Classifier: License :: OSI Approved :: MIT License | |||
Classifier: Operating System :: OS Independent | |||
Classifier: Programming Language :: Python | |||
Classifier: Programming Language :: Python :: 2 | |||
Classifier: Programming Language :: Python :: 2.7 | |||
Classifier: Programming Language :: Python :: 3 | |||
Classifier: Programming Language :: Python :: 3.4 | |||
Classifier: Programming Language :: Python :: 3.5 | |||
Classifier: Programming Language :: Python :: 3.6 | |||
Classifier: Topic :: Software Development :: Libraries :: Python Modules | |||
Classifier: Topic :: Software Development :: Quality Assurance |
@@ -0,0 +1,25 @@ | |||
AUTHORS.rst | |||
LICENSE | |||
MANIFEST.in | |||
README.rst | |||
autopep8.py | |||
setup.cfg | |||
setup.py | |||
autopep8.egg-info/PKG-INFO | |||
autopep8.egg-info/SOURCES.txt | |||
autopep8.egg-info/dependency_links.txt | |||
autopep8.egg-info/entry_points.txt | |||
autopep8.egg-info/not-zip-safe | |||
autopep8.egg-info/requires.txt | |||
autopep8.egg-info/top_level.txt | |||
test/__init__.py | |||
test/bad_encoding.py | |||
test/bad_encoding2.py | |||
test/e101_example.py | |||
test/example.py | |||
test/example_with_reduce.py | |||
test/iso_8859_1.py | |||
test/test_autopep8.py | |||
test/test_suite.py | |||
test/fake_configuration/.pep8 | |||
test/fake_pycodestyle_configuration/tox.ini |
@@ -0,0 +1 @@ | |||
@@ -0,0 +1,3 @@ | |||
[console_scripts] | |||
autopep8 = autopep8:main | |||
@@ -0,0 +1,10 @@ | |||
../../../../bin/autopep8 | |||
../__pycache__/autopep8.cpython-36.pyc | |||
../autopep8.py | |||
PKG-INFO | |||
SOURCES.txt | |||
dependency_links.txt | |||
entry_points.txt | |||
not-zip-safe | |||
requires.txt | |||
top_level.txt |
@@ -0,0 +1 @@ | |||
@@ -0,0 +1 @@ | |||
pycodestyle>=2.3 |
@@ -0,0 +1 @@ | |||
autopep8 |
@@ -0,0 +1,105 @@ | |||
""" | |||
ldap - base module | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
# This is also the overall release version number | |||
from ldap.pkginfo import __version__, __author__, __license__ | |||
import os | |||
import sys | |||
if __debug__: | |||
# Tracing is only supported in debugging mode | |||
import atexit | |||
import traceback | |||
_trace_level = int(os.environ.get("PYTHON_LDAP_TRACE_LEVEL", 0)) | |||
_trace_file = os.environ.get("PYTHON_LDAP_TRACE_FILE") | |||
if _trace_file is None: | |||
_trace_file = sys.stderr | |||
else: | |||
_trace_file = open(_trace_file, 'a') | |||
atexit.register(_trace_file.close) | |||
_trace_stack_limit = None | |||
import _ldap | |||
assert _ldap.__version__==__version__, \ | |||
ImportError('ldap %s and _ldap %s version mismatch!' % (__version__,_ldap.__version__)) | |||
from _ldap import * | |||
# call into libldap to initialize it right now | |||
LIBLDAP_API_INFO = _ldap.get_option(_ldap.OPT_API_INFO) | |||
OPT_NAMES_DICT = {} | |||
for k,v in vars(_ldap).items(): | |||
if k.startswith('OPT_'): | |||
OPT_NAMES_DICT[v]=k | |||
class DummyLock: | |||
"""Define dummy class with methods compatible to threading.Lock""" | |||
def __init__(self): | |||
pass | |||
def acquire(self): | |||
pass | |||
def release(self): | |||
pass | |||
try: | |||
# Check if Python installation was build with thread support | |||
import thread | |||
except ImportError: | |||
LDAPLockBaseClass = DummyLock | |||
else: | |||
import threading | |||
LDAPLockBaseClass = threading.Lock | |||
class LDAPLock: | |||
""" | |||
Mainly a wrapper class to log all locking events. | |||
Note that this cumbersome approach with _lock attribute was taken | |||
since threading.Lock is not suitable for sub-classing. | |||
""" | |||
_min_trace_level = 3 | |||
def __init__(self,lock_class=None,desc=''): | |||
""" | |||
lock_class | |||
Class compatible to threading.Lock | |||
desc | |||
Description shown in debug log messages | |||
""" | |||
self._desc = desc | |||
self._lock = (lock_class or LDAPLockBaseClass)() | |||
def acquire(self): | |||
if __debug__: | |||
global _trace_level | |||
if _trace_level>=self._min_trace_level: | |||
_trace_file.write('***%s.acquire() %s %s\n' % (self.__class__.__name__,repr(self),self._desc)) | |||
return self._lock.acquire() | |||
def release(self): | |||
if __debug__: | |||
global _trace_level | |||
if _trace_level>=self._min_trace_level: | |||
_trace_file.write('***%s.release() %s %s\n' % (self.__class__.__name__,repr(self),self._desc)) | |||
return self._lock.release() | |||
# Create module-wide lock for serializing all calls into underlying LDAP lib | |||
_ldap_module_lock = LDAPLock(desc='Module wide') | |||
from ldap.functions import initialize,get_option,set_option,escape_str,strf_secs,strp_secs | |||
from ldap.ldapobject import NO_UNIQUE_ENTRY, LDAPBytesWarning | |||
from ldap.dn import explode_dn,explode_rdn,str2dn,dn2str | |||
del str2dn | |||
del dn2str | |||
# More constants | |||
# For compatibility of 2.3 and 2.4 OpenLDAP API | |||
OPT_DIAGNOSTIC_MESSAGE = OPT_ERROR_STRING |
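The trace machinery above is configured entirely through environment variables read at import time. A minimal sketch of turning it on (the log path is a placeholder; python-ldap must be installed, the variables must be set before ldap is first imported, and Python must not run with -O, since tracing is only active under __debug__):

import os

# must be set before the first `import ldap`, because the values are read at import time
os.environ["PYTHON_LDAP_TRACE_LEVEL"] = "3"                   # >= LDAPLock._min_trace_level
os.environ["PYTHON_LDAP_TRACE_FILE"] = "/tmp/ldap_trace.log"  # placeholder path; defaults to stderr

import ldap  # LDAPLock.acquire()/release() events are now written to the trace file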
@@ -0,0 +1,15 @@ | |||
""" | |||
ldap.asyncsearch - handle async LDAP search operations | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
import warnings | |||
from ldap.asyncsearch import * | |||
from ldap.asyncsearch import __version__ | |||
warnings.warn( | |||
"'ldap.async module' is deprecated, import 'ldap.asyncsearch' instead.", | |||
DeprecationWarning, | |||
stacklevel=2 | |||
) |
@@ -0,0 +1,284 @@ | |||
""" | |||
ldap.asyncsearch - handle async LDAP search operations | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
import ldap | |||
from ldap import __version__ | |||
import ldif | |||
SEARCH_RESULT_TYPES = { | |||
ldap.RES_SEARCH_ENTRY, | |||
ldap.RES_SEARCH_RESULT, | |||
ldap.RES_SEARCH_REFERENCE, | |||
} | |||
ENTRY_RESULT_TYPES = { | |||
ldap.RES_SEARCH_ENTRY, | |||
ldap.RES_SEARCH_RESULT, | |||
} | |||
class WrongResultType(Exception): | |||
def __init__(self,receivedResultType,expectedResultTypes): | |||
self.receivedResultType = receivedResultType | |||
self.expectedResultTypes = expectedResultTypes | |||
Exception.__init__(self) | |||
def __str__(self): | |||
return 'Received wrong result type %s (expected one of %s).' % ( | |||
self.receivedResultType, | |||
', '.join(self.expectedResultTypes), | |||
) | |||
class AsyncSearchHandler: | |||
""" | |||
Class for stream-processing LDAP search results | |||
Arguments: | |||
l | |||
LDAPObject instance | |||
""" | |||
def __init__(self,l): | |||
self._l = l | |||
self._msgId = None | |||
self._afterFirstResult = 1 | |||
def startSearch( | |||
self, | |||
searchRoot, | |||
searchScope, | |||
filterStr, | |||
attrList=None, | |||
attrsOnly=0, | |||
timeout=-1, | |||
sizelimit=0, | |||
serverctrls=None, | |||
clientctrls=None | |||
): | |||
""" | |||
searchRoot | |||
See parameter base of method LDAPObject.search() | |||
searchScope | |||
See parameter scope of method LDAPObject.search() | |||
filterStr | |||
See parameter filter of method LDAPObject.search() | |||
attrList=None | |||
See parameter attrlist of method LDAPObject.search() | |||
attrsOnly | |||
See parameter attrsonly of method LDAPObject.search() | |||
timeout | |||
Maximum time the server shall use for search operation | |||
sizelimit | |||
Maximum number of entries a server should return | |||
(request client-side limit) | |||
serverctrls | |||
list of server-side LDAP controls | |||
clientctrls | |||
list of client-side LDAP controls | |||
""" | |||
self._msgId = self._l.search_ext( | |||
searchRoot,searchScope,filterStr, | |||
attrList,attrsOnly,serverctrls,clientctrls,timeout,sizelimit | |||
) | |||
self._afterFirstResult = 1 | |||
return # startSearch() | |||
def preProcessing(self): | |||
""" | |||
Do anything you want after starting search but | |||
before receiving and processing results | |||
""" | |||
def afterFirstResult(self): | |||
""" | |||
Do anything you want right after successfully receiving but before | |||
processing first result | |||
""" | |||
def postProcessing(self): | |||
""" | |||
Do anything you want after receiving and processing all results | |||
""" | |||
def processResults(self,ignoreResultsNumber=0,processResultsCount=0,timeout=-1): | |||
""" | |||
ignoreResultsNumber | |||
Don't process the first ignoreResultsNumber results. | |||
processResultsCount | |||
If non-zero this parameters indicates the number of results | |||
processed is limited to processResultsCount. | |||
timeout | |||
See parameter timeout of ldap.LDAPObject.result() | |||
""" | |||
self.preProcessing() | |||
result_counter = 0 | |||
end_result_counter = ignoreResultsNumber+processResultsCount | |||
go_ahead = 1 | |||
partial = 0 | |||
self.beginResultsDropped = 0 | |||
self.endResultBreak = result_counter | |||
try: | |||
result_type,result_list = None,None | |||
while go_ahead: | |||
while result_type is None and not result_list: | |||
result_type,result_list,result_msgid,result_serverctrls = self._l.result3(self._msgId,0,timeout) | |||
if self._afterFirstResult: | |||
self.afterFirstResult() | |||
self._afterFirstResult = 0 | |||
if not result_list: | |||
break | |||
if result_type not in SEARCH_RESULT_TYPES: | |||
raise WrongResultType(result_type,SEARCH_RESULT_TYPES) | |||
# Loop over list of search results | |||
for result_item in result_list: | |||
if result_counter<ignoreResultsNumber: | |||
self.beginResultsDropped = self.beginResultsDropped+1 | |||
elif processResultsCount==0 or result_counter<end_result_counter: | |||
self._processSingleResult(result_type,result_item) | |||
else: | |||
go_ahead = 0 # break-out from while go_ahead | |||
partial = 1 | |||
break # break-out from this for-loop | |||
result_counter = result_counter+1 | |||
result_type,result_list = None,None | |||
self.endResultBreak = result_counter | |||
finally: | |||
if partial and self._msgId!=None: | |||
self._l.abandon(self._msgId) | |||
self.postProcessing() | |||
return partial # processResults() | |||
def _processSingleResult(self,resultType,resultItem): | |||
""" | |||
Process single entry | |||
resultType | |||
result type | |||
resultItem | |||
Single item of a result list | |||
""" | |||
pass | |||
class List(AsyncSearchHandler): | |||
""" | |||
Class for collecting all search results. | |||
This does not seem to make sense in the first place but think | |||
of retrieving exactly a certain portion of the available search | |||
results. | |||
""" | |||
def __init__(self,l): | |||
AsyncSearchHandler.__init__(self,l) | |||
self.allResults = [] | |||
def _processSingleResult(self,resultType,resultItem): | |||
self.allResults.append((resultType,resultItem)) | |||
class Dict(AsyncSearchHandler): | |||
""" | |||
Class for collecting all search results into a dictionary {dn:entry} | |||
""" | |||
def __init__(self,l): | |||
AsyncSearchHandler.__init__(self,l) | |||
self.allEntries = {} | |||
def _processSingleResult(self,resultType,resultItem): | |||
if resultType in ENTRY_RESULT_TYPES: | |||
# Search continuations are ignored | |||
dn,entry = resultItem | |||
self.allEntries[dn] = entry | |||
class IndexedDict(Dict): | |||
""" | |||
Class for collecting all search results into a dictionary {dn:entry} | |||
and maintain case-sensitive equality indexes to entries | |||
""" | |||
def __init__(self,l,indexed_attrs=None): | |||
Dict.__init__(self,l) | |||
self.indexed_attrs = indexed_attrs or () | |||
self.index = {}.fromkeys(self.indexed_attrs,{}) | |||
def _processSingleResult(self,resultType,resultItem): | |||
if resultType in ENTRY_RESULT_TYPES: | |||
# Search continuations are ignored | |||
dn,entry = resultItem | |||
self.allEntries[dn] = entry | |||
for a in self.indexed_attrs: | |||
if a in entry: | |||
for v in entry[a]: | |||
try: | |||
self.index[a][v].append(dn) | |||
except KeyError: | |||
self.index[a][v] = [ dn ] | |||
class FileWriter(AsyncSearchHandler): | |||
""" | |||
Class for writing a stream of LDAP search results to a file object | |||
Arguments: | |||
l | |||
LDAPObject instance | |||
f | |||
File object instance where the LDIF data is written to | |||
""" | |||
def __init__(self,l,f,headerStr='',footerStr=''): | |||
AsyncSearchHandler.__init__(self,l) | |||
self._f = f | |||
self.headerStr = headerStr | |||
self.footerStr = footerStr | |||
def preProcessing(self): | |||
""" | |||
The headerStr is written to output after starting search but | |||
before receiving and processing results. | |||
""" | |||
self._f.write(self.headerStr) | |||
def postProcessing(self): | |||
""" | |||
The footerStr is written to output after receiving and | |||
processing results. | |||
""" | |||
self._f.write(self.footerStr) | |||
class LDIFWriter(FileWriter): | |||
""" | |||
Class for writing a stream LDAP search results to a LDIF file | |||
Arguments: | |||
l | |||
LDAPObject instance | |||
writer_obj | |||
Either a file-like object or a ldif.LDIFWriter instance used for output | |||
""" | |||
def __init__(self,l,writer_obj,headerStr='',footerStr=''): | |||
if isinstance(writer_obj,ldif.LDIFWriter): | |||
self._ldif_writer = writer_obj | |||
else: | |||
self._ldif_writer = ldif.LDIFWriter(writer_obj) | |||
FileWriter.__init__(self,l,self._ldif_writer._output_file,headerStr,footerStr) | |||
def _processSingleResult(self,resultType,resultItem): | |||
if resultType in ENTRY_RESULT_TYPES: | |||
# Search continuations are ignored | |||
dn,entry = resultItem | |||
self._ldif_writer.unparse(dn,entry) |
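As a usage illustration of the handler API above, a sketch that collects results with the List handler; the server URI and search base are placeholders and assume an LDAP server that permits anonymous search:

import ldap
import ldap.asyncsearch

conn = ldap.initialize('ldap://localhost')   # placeholder server URI
handler = ldap.asyncsearch.List(conn)
handler.startSearch('dc=example,dc=com',     # placeholder search base
                    ldap.SCOPE_SUBTREE,
                    '(objectClass=*)')
partial = handler.processResults()           # fills handler.allResults while streaming
for result_type, result_item in handler.allResults:
    if result_type in ldap.asyncsearch.ENTRY_RESULT_TYPES:
        dn, entry = result_item
        print(dn)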
@@ -0,0 +1,100 @@ | |||
""" | |||
This is a convenience wrapper for dictionaries | |||
returned from LDAP servers containing attribute | |||
names of variable case. | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
from ldap import __version__ | |||
from ldap.compat import IterableUserDict | |||
class cidict(IterableUserDict): | |||
""" | |||
Case-insensitive but case-respecting dictionary. | |||
""" | |||
def __init__(self,default=None): | |||
self._keys = {} | |||
IterableUserDict.__init__(self,{}) | |||
self.update(default or {}) | |||
def __getitem__(self,key): | |||
return self.data[key.lower()] | |||
def __setitem__(self,key,value): | |||
lower_key = key.lower() | |||
self._keys[lower_key] = key | |||
self.data[lower_key] = value | |||
def __delitem__(self,key): | |||
lower_key = key.lower() | |||
del self._keys[lower_key] | |||
del self.data[lower_key] | |||
def update(self,dict): | |||
for key, value in dict.items(): | |||
self[key] = value | |||
def has_key(self,key): | |||
return key in self | |||
def __contains__(self,key): | |||
return IterableUserDict.__contains__(self, key.lower()) | |||
def __iter__(self): | |||
return iter(self.keys()) | |||
def keys(self): | |||
return self._keys.values() | |||
def items(self): | |||
result = [] | |||
for k in self._keys.values(): | |||
result.append((k,self[k])) | |||
return result | |||
def strlist_minus(a,b): | |||
""" | |||
Return list of all items in a which are not in b (a - b). | |||
a,b are supposed to be lists of case-insensitive strings. | |||
""" | |||
temp = cidict() | |||
for elt in b: | |||
temp[elt] = elt | |||
result = [ | |||
elt | |||
for elt in a | |||
if elt not in temp | |||
] | |||
return result | |||
def strlist_intersection(a,b): | |||
""" | |||
Return intersection of two lists of case-insensitive strings a,b. | |||
""" | |||
temp = cidict() | |||
for elt in a: | |||
temp[elt] = elt | |||
result = [ | |||
temp[elt] | |||
for elt in b | |||
if elt in temp | |||
] | |||
return result | |||
def strlist_union(a,b): | |||
""" | |||
Return union of two lists of case-insensitive strings a,b. | |||
""" | |||
temp = cidict() | |||
for elt in a: | |||
temp[elt] = elt | |||
for elt in b: | |||
temp[elt] = elt | |||
return temp.values() |
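A small, self-contained illustration of the case-insensitive lookup provided by cidict (it only assumes python-ldap is importable):

from ldap.cidict import cidict

attrs = cidict({'objectClass': [b'person'], 'cn': [b'Jane Doe']})
print(attrs['objectclass'])    # case-insensitive lookup -> [b'person']
print('CN' in attrs)           # True
print(list(attrs.keys()))      # original spelling preserved: ['objectClass', 'cn']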
@@ -0,0 +1,113 @@ | |||
"""Compatibility wrappers for Py2/Py3.""" | |||
import sys | |||
import os | |||
if sys.version_info[0] < 3: | |||
from UserDict import UserDict, IterableUserDict | |||
from urllib import quote | |||
from urllib import quote_plus | |||
from urllib import unquote as urllib_unquote | |||
from urllib import urlopen | |||
from urlparse import urlparse | |||
def unquote(uri): | |||
"""Specialized unquote that uses UTF-8 for parsing.""" | |||
uri = uri.encode('ascii') | |||
unquoted = urllib_unquote(uri) | |||
return unquoted.decode('utf-8') | |||
# Old-style of re-raising an exception is SyntaxError in Python 3, | |||
# so hide behind exec() so the Python 3 parser doesn't see it | |||
exec('''def reraise(exc_type, exc_value, exc_traceback): | |||
"""Re-raise an exception given information from sys.exc_info() | |||
Note that unlike six.reraise, this does not support replacing the | |||
traceback. All arguments must come from a single sys.exc_info() call. | |||
""" | |||
raise exc_type, exc_value, exc_traceback | |||
''') | |||
else: | |||
from collections import UserDict | |||
IterableUserDict = UserDict | |||
from urllib.parse import quote, quote_plus, unquote, urlparse | |||
from urllib.request import urlopen | |||
def reraise(exc_type, exc_value, exc_traceback): | |||
"""Re-raise an exception given information from sys.exc_info() | |||
Note that unlike six.reraise, this does not support replacing the | |||
traceback. All arguments must come from a single sys.exc_info() call. | |||
""" | |||
# In Python 3, all exception info is contained in one object. | |||
raise exc_value | |||
try: | |||
from shutil import which | |||
except ImportError: | |||
# shutil.which() from Python 3.6 | |||
# "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, | |||
# 2011, 2012, 2013, 2014, 2015, 2016, 2017 Python Software Foundation; | |||
# All Rights Reserved" | |||
def which(cmd, mode=os.F_OK | os.X_OK, path=None): | |||
"""Given a command, mode, and a PATH string, return the path which | |||
conforms to the given mode on the PATH, or None if there is no such | |||
file. | |||
`mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result | |||
of os.environ.get("PATH"), or can be overridden with a custom search | |||
path. | |||
""" | |||
# Check that a given file can be accessed with the correct mode. | |||
# Additionally check that `file` is not a directory, as on Windows | |||
# directories pass the os.access check. | |||
def _access_check(fn, mode): | |||
return (os.path.exists(fn) and os.access(fn, mode) | |||
and not os.path.isdir(fn)) | |||
# If we're given a path with a directory part, look it up directly rather | |||
# than referring to PATH directories. This includes checking relative to the | |||
# current directory, e.g. ./script | |||
if os.path.dirname(cmd): | |||
if _access_check(cmd, mode): | |||
return cmd | |||
return None | |||
if path is None: | |||
path = os.environ.get("PATH", os.defpath) | |||
if not path: | |||
return None | |||
path = path.split(os.pathsep) | |||
if sys.platform == "win32": | |||
# The current directory takes precedence on Windows. | |||
if not os.curdir in path: | |||
path.insert(0, os.curdir) | |||
# PATHEXT is necessary to check on Windows. | |||
pathext = os.environ.get("PATHEXT", "").split(os.pathsep) | |||
# See if the given file matches any of the expected path extensions. | |||
# This will allow us to short circuit when given "python.exe". | |||
# If it does match, only test that one, otherwise we have to try | |||
# others. | |||
if any(cmd.lower().endswith(ext.lower()) for ext in pathext): | |||
files = [cmd] | |||
else: | |||
files = [cmd + ext for ext in pathext] | |||
else: | |||
# On other platforms you don't have things like PATHEXT to tell you | |||
# what file suffixes are executable, so just pass on cmd as-is. | |||
files = [cmd] | |||
seen = set() | |||
for dir in path: | |||
normdir = os.path.normcase(dir) | |||
if not normdir in seen: | |||
seen.add(normdir) | |||
for thefile in files: | |||
name = os.path.join(dir, thefile) | |||
if _access_check(name, mode): | |||
return name | |||
return None |
@@ -0,0 +1,404 @@ | |||
"""Definitions for constants exported by OpenLDAP | |||
This file lists all constants we know about, even those that aren't | |||
available in the OpenLDAP version python-ldap is compiled against. | |||
The information serves two purposes: | |||
- Generate a C header with the constants | |||
- Provide support for building documentation without compiling python-ldap | |||
""" | |||
# This module cannot import anything from ldap. | |||
# When building documentation, it is used to initialize ldap.__init__. | |||
from __future__ import print_function | |||
class Constant(object): | |||
"""Base class for a definition of an OpenLDAP constant | |||
""" | |||
def __init__(self, name, optional=False, requirements=(), doc=None): | |||
self.name = name | |||
if optional: | |||
self_requirement = 'defined(LDAP_{})'.format(self.name) | |||
requirements = list(requirements) + [self_requirement] | |||
self.requirements = requirements | |||
self.doc = self.__doc__ = doc | |||
class Error(Constant): | |||
"""Definition for an OpenLDAP error code | |||
This is a constant at the C level; in Python errors are provided as | |||
exception classes. | |||
""" | |||
c_template = 'add_err({self.name});' | |||
class Int(Constant): | |||
"""Definition for an OpenLDAP integer constant""" | |||
c_template = 'add_int({self.name});' | |||
class TLSInt(Int): | |||
"""Definition for a TLS integer constant -- requires HAVE_TLS""" | |||
def __init__(self, *args, **kwargs): | |||
requirements = list(kwargs.get('requirements', ()))
kwargs['requirements'] = ['HAVE_TLS'] + requirements
super(TLSInt, self).__init__(*args, **kwargs) | |||
class Feature(Constant): | |||
"""Definition for a feature: 0 or 1 based on a C #ifdef | |||
""" | |||
c_template = '\n'.join([ | |||
'', | |||
'#ifdef {self.c_feature}', | |||
'if (PyModule_AddIntConstant(m, "{self.name}", 1) != 0) return -1;', | |||
'#else', | |||
'if (PyModule_AddIntConstant(m, "{self.name}", 0) != 0) return -1;', | |||
'#endif', | |||
'', | |||
]) | |||
def __init__(self, name, c_feature, **kwargs): | |||
super(Feature, self).__init__(name, **kwargs) | |||
self.c_feature = c_feature | |||
class Str(Constant): | |||
c_template = 'add_string({self.name});' | |||
API_2004 = 'LDAP_API_VERSION >= 2004' | |||
CONSTANTS = ( | |||
Error('ADMINLIMIT_EXCEEDED'), | |||
Error('AFFECTS_MULTIPLE_DSAS'), | |||
Error('ALIAS_DEREF_PROBLEM'), | |||
Error('ALIAS_PROBLEM'), | |||
Error('ALREADY_EXISTS'), | |||
Error('AUTH_METHOD_NOT_SUPPORTED'), | |||
Error('AUTH_UNKNOWN'), | |||
Error('BUSY'), | |||
Error('CLIENT_LOOP'), | |||
Error('COMPARE_FALSE'), | |||
Error('COMPARE_TRUE'), | |||
Error('CONFIDENTIALITY_REQUIRED'), | |||
Error('CONNECT_ERROR'), | |||
Error('CONSTRAINT_VIOLATION'), | |||
Error('CONTROL_NOT_FOUND'), | |||
Error('DECODING_ERROR'), | |||
Error('ENCODING_ERROR'), | |||
Error('FILTER_ERROR'), | |||
Error('INAPPROPRIATE_AUTH'), | |||
Error('INAPPROPRIATE_MATCHING'), | |||
Error('INSUFFICIENT_ACCESS'), | |||
Error('INVALID_CREDENTIALS'), | |||
Error('INVALID_DN_SYNTAX'), | |||
Error('INVALID_SYNTAX'), | |||
Error('IS_LEAF'), | |||
Error('LOCAL_ERROR'), | |||
Error('LOOP_DETECT'), | |||
Error('MORE_RESULTS_TO_RETURN'), | |||
Error('NAMING_VIOLATION'), | |||
Error('NO_MEMORY'), | |||
Error('NO_OBJECT_CLASS_MODS'), | |||
Error('NO_OBJECT_CLASS_MODS'), | |||
Error('NO_RESULTS_RETURNED'), | |||
Error('NO_SUCH_ATTRIBUTE'), | |||
Error('NO_SUCH_OBJECT'), | |||
Error('NOT_ALLOWED_ON_NONLEAF'), | |||
Error('NOT_ALLOWED_ON_RDN'), | |||
Error('NOT_SUPPORTED'), | |||
Error('OBJECT_CLASS_VIOLATION'), | |||
Error('OPERATIONS_ERROR'), | |||
Error('OTHER'), | |||
Error('PARAM_ERROR'), | |||
Error('PARTIAL_RESULTS'), | |||
Error('PROTOCOL_ERROR'), | |||
Error('REFERRAL'), | |||
Error('REFERRAL_LIMIT_EXCEEDED'), | |||
Error('RESULTS_TOO_LARGE'), | |||
Error('SASL_BIND_IN_PROGRESS'), | |||
Error('SERVER_DOWN'), | |||
Error('SIZELIMIT_EXCEEDED'), | |||
Error('STRONG_AUTH_NOT_SUPPORTED'), | |||
Error('STRONG_AUTH_REQUIRED'), | |||
Error('SUCCESS'), | |||
Error('TIMELIMIT_EXCEEDED'), | |||
Error('TIMEOUT'), | |||
Error('TYPE_OR_VALUE_EXISTS'), | |||
Error('UNAVAILABLE'), | |||
Error('UNAVAILABLE_CRITICAL_EXTENSION'), | |||
Error('UNDEFINED_TYPE'), | |||
Error('UNWILLING_TO_PERFORM'), | |||
Error('USER_CANCELLED'), | |||
Error('VLV_ERROR'), | |||
Error('X_PROXY_AUTHZ_FAILURE'), | |||
Error('CANCELLED', requirements=['defined(LDAP_API_FEATURE_CANCEL)']), | |||
Error('NO_SUCH_OPERATION', requirements=['defined(LDAP_API_FEATURE_CANCEL)']), | |||
Error('TOO_LATE', requirements=['defined(LDAP_API_FEATURE_CANCEL)']), | |||
Error('CANNOT_CANCEL', requirements=['defined(LDAP_API_FEATURE_CANCEL)']), | |||
Error('ASSERTION_FAILED', optional=True), | |||
Error('PROXIED_AUTHORIZATION_DENIED', optional=True), | |||
# simple constants | |||
Int('API_VERSION'), | |||
Int('VENDOR_VERSION'), | |||
Int('PORT'), | |||
Int('VERSION1'), | |||
Int('VERSION2'), | |||
Int('VERSION3'), | |||
Int('VERSION_MIN'), | |||
Int('VERSION'), | |||
Int('VERSION_MAX'), | |||
Int('TAG_MESSAGE'), | |||
Int('TAG_MSGID'), | |||
Int('REQ_BIND'), | |||
Int('REQ_UNBIND'), | |||
Int('REQ_SEARCH'), | |||
Int('REQ_MODIFY'), | |||
Int('REQ_ADD'), | |||
Int('REQ_DELETE'), | |||
Int('REQ_MODRDN'), | |||
Int('REQ_COMPARE'), | |||
Int('REQ_ABANDON'), | |||
Int('TAG_LDAPDN'), | |||
Int('TAG_LDAPCRED'), | |||
Int('TAG_CONTROLS'), | |||
Int('TAG_REFERRAL'), | |||
Int('REQ_EXTENDED'), | |||
Int('TAG_NEWSUPERIOR', requirements=[API_2004]), | |||
Int('TAG_EXOP_REQ_OID', requirements=[API_2004]), | |||
Int('TAG_EXOP_REQ_VALUE', requirements=[API_2004]), | |||
Int('TAG_EXOP_RES_OID', requirements=[API_2004]), | |||
Int('TAG_EXOP_RES_VALUE', requirements=[API_2004]), | |||
Int('TAG_SASL_RES_CREDS', requirements=[API_2004, 'defined(HAVE_SASL)']), | |||
Int('SASL_AUTOMATIC'), | |||
Int('SASL_INTERACTIVE'), | |||
Int('SASL_QUIET'), | |||
# reversibles | |||
Int('RES_BIND'), | |||
Int('RES_SEARCH_ENTRY'), | |||
Int('RES_SEARCH_RESULT'), | |||
Int('RES_MODIFY'), | |||
Int('RES_ADD'), | |||
Int('RES_DELETE'), | |||
Int('RES_MODRDN'), | |||
Int('RES_COMPARE'), | |||
Int('RES_ANY'), | |||
Int('RES_SEARCH_REFERENCE'), | |||
Int('RES_EXTENDED'), | |||
Int('RES_UNSOLICITED'), | |||
Int('RES_INTERMEDIATE'), | |||
# non-reversibles | |||
Int('AUTH_NONE'), | |||
Int('AUTH_SIMPLE'), | |||
Int('SCOPE_BASE'), | |||
Int('SCOPE_ONELEVEL'), | |||
Int('SCOPE_SUBTREE'), | |||
Int('SCOPE_SUBORDINATE', optional=True), | |||
Int('MOD_ADD'), | |||
Int('MOD_DELETE'), | |||
Int('MOD_REPLACE'), | |||
Int('MOD_INCREMENT'), | |||
Int('MOD_BVALUES'), | |||
Int('MSG_ONE'), | |||
Int('MSG_ALL'), | |||
Int('MSG_RECEIVED'), | |||
# (error constants handled above) | |||
Int('DEREF_NEVER'), | |||
Int('DEREF_SEARCHING'), | |||
Int('DEREF_FINDING'), | |||
Int('DEREF_ALWAYS'), | |||
Int('NO_LIMIT'), | |||
Int('OPT_API_INFO'), | |||
Int('OPT_DEREF'), | |||
Int('OPT_SIZELIMIT'), | |||
Int('OPT_TIMELIMIT'), | |||
Int('OPT_REFERRALS', optional=True), | |||
Int('OPT_ERROR_NUMBER'), | |||
Int('OPT_RESTART'), | |||
Int('OPT_PROTOCOL_VERSION'), | |||
Int('OPT_SERVER_CONTROLS'), | |||
Int('OPT_CLIENT_CONTROLS'), | |||
Int('OPT_API_FEATURE_INFO'), | |||
Int('OPT_HOST_NAME'), | |||
Int('OPT_DESC'), | |||
Int('OPT_DIAGNOSTIC_MESSAGE'), | |||
Int('OPT_ERROR_STRING'), | |||
Int('OPT_MATCHED_DN'), | |||
Int('OPT_DEBUG_LEVEL'), | |||
Int('OPT_TIMEOUT'), | |||
Int('OPT_REFHOPLIMIT'), | |||
Int('OPT_NETWORK_TIMEOUT'), | |||
Int('OPT_URI'), | |||
Int('OPT_DEFBASE', optional=True), | |||
TLSInt('OPT_X_TLS', optional=True), | |||
TLSInt('OPT_X_TLS_CTX'), | |||
TLSInt('OPT_X_TLS_CACERTFILE'), | |||
TLSInt('OPT_X_TLS_CACERTDIR'), | |||
TLSInt('OPT_X_TLS_CERTFILE'), | |||
TLSInt('OPT_X_TLS_KEYFILE'), | |||
TLSInt('OPT_X_TLS_REQUIRE_CERT'), | |||
TLSInt('OPT_X_TLS_CIPHER_SUITE'), | |||
TLSInt('OPT_X_TLS_RANDOM_FILE'), | |||
TLSInt('OPT_X_TLS_DHFILE'), | |||
TLSInt('OPT_X_TLS_NEVER'), | |||
TLSInt('OPT_X_TLS_HARD'), | |||
TLSInt('OPT_X_TLS_DEMAND'), | |||
TLSInt('OPT_X_TLS_ALLOW'), | |||
TLSInt('OPT_X_TLS_TRY'), | |||
TLSInt('OPT_X_TLS_PEERCERT', optional=True), | |||
TLSInt('OPT_X_TLS_VERSION', optional=True), | |||
TLSInt('OPT_X_TLS_CIPHER', optional=True), | |||
TLSInt('OPT_X_TLS_PEERCERT', optional=True), | |||
# only available if OpenSSL supports it => might cause | |||
# backward compatibility problems | |||
TLSInt('OPT_X_TLS_CRLCHECK', optional=True), | |||
TLSInt('OPT_X_TLS_CRLFILE', optional=True), | |||
TLSInt('OPT_X_TLS_CRL_NONE'), | |||
TLSInt('OPT_X_TLS_CRL_PEER'), | |||
TLSInt('OPT_X_TLS_CRL_ALL'), | |||
TLSInt('OPT_X_TLS_NEWCTX', optional=True), | |||
TLSInt('OPT_X_TLS_PROTOCOL_MIN', optional=True), | |||
TLSInt('OPT_X_TLS_PACKAGE', optional=True), | |||
Int('OPT_X_SASL_MECH'), | |||
Int('OPT_X_SASL_REALM'), | |||
Int('OPT_X_SASL_AUTHCID'), | |||
Int('OPT_X_SASL_AUTHZID'), | |||
Int('OPT_X_SASL_SSF'), | |||
Int('OPT_X_SASL_SSF_EXTERNAL'), | |||
Int('OPT_X_SASL_SECPROPS'), | |||
Int('OPT_X_SASL_SSF_MIN'), | |||
Int('OPT_X_SASL_SSF_MAX'), | |||
Int('OPT_X_SASL_NOCANON', optional=True), | |||
Int('OPT_X_SASL_USERNAME', optional=True), | |||
Int('OPT_CONNECT_ASYNC', optional=True), | |||
Int('OPT_X_KEEPALIVE_IDLE', optional=True), | |||
Int('OPT_X_KEEPALIVE_PROBES', optional=True), | |||
Int('OPT_X_KEEPALIVE_INTERVAL', optional=True), | |||
Int('DN_FORMAT_LDAP'), | |||
Int('DN_FORMAT_LDAPV3'), | |||
Int('DN_FORMAT_LDAPV2'), | |||
Int('DN_FORMAT_DCE'), | |||
Int('DN_FORMAT_UFN'), | |||
Int('DN_FORMAT_AD_CANONICAL'), | |||
# Int('DN_FORMAT_LBER'), # for testing only | |||
Int('DN_FORMAT_MASK'), | |||
Int('DN_PRETTY'), | |||
Int('DN_SKIP'), | |||
Int('DN_P_NOLEADTRAILSPACES'), | |||
Int('DN_P_NOSPACEAFTERRDN'), | |||
Int('DN_PEDANTIC'), | |||
Int('AVA_NULL'), | |||
Int('AVA_STRING'), | |||
Int('AVA_BINARY'), | |||
Int('AVA_NONPRINTABLE'), | |||
Int('OPT_SUCCESS'), | |||
# XXX - these should be errors | |||
Int('URL_ERR_BADSCOPE'), | |||
Int('URL_ERR_MEM'), | |||
# Int('LIBLDAP_R'), | |||
Feature('LIBLDAP_R', 'HAVE_LIBLDAP_R'), | |||
Feature('SASL_AVAIL', 'HAVE_SASL'), | |||
Feature('TLS_AVAIL', 'HAVE_TLS'), | |||
Str("CONTROL_MANAGEDSAIT"), | |||
Str("CONTROL_PROXY_AUTHZ"), | |||
Str("CONTROL_SUBENTRIES"), | |||
Str("CONTROL_VALUESRETURNFILTER"), | |||
Str("CONTROL_ASSERT"), | |||
Str("CONTROL_PRE_READ"), | |||
Str("CONTROL_POST_READ"), | |||
Str("CONTROL_SORTREQUEST"), | |||
Str("CONTROL_SORTRESPONSE"), | |||
Str("CONTROL_PAGEDRESULTS"), | |||
Str("CONTROL_SYNC"), | |||
Str("CONTROL_SYNC_STATE"), | |||
Str("CONTROL_SYNC_DONE"), | |||
Str("SYNC_INFO"), | |||
Str("CONTROL_PASSWORDPOLICYREQUEST"), | |||
Str("CONTROL_PASSWORDPOLICYRESPONSE"), | |||
Str("CONTROL_RELAX"), | |||
) | |||
def print_header(): # pragma: no cover | |||
"""Print the C header file to standard output""" | |||
print('/*') | |||
print(' * Generated with:') | |||
print(' * python Lib/ldap/constants.py > Modules/constants_generated.h') | |||
print(' *') | |||
print(' * Please do any modifications there, then re-generate this file') | |||
print(' */') | |||
print('') | |||
current_requirements = [] | |||
def pop_requirement(): | |||
popped = current_requirements.pop() | |||
print('#endif') | |||
print() | |||
for definition in CONSTANTS: | |||
while not set(current_requirements).issubset(definition.requirements): | |||
pop_requirement() | |||
for requirement in definition.requirements: | |||
if requirement not in current_requirements: | |||
current_requirements.append(requirement) | |||
print() | |||
print('#if {}'.format(requirement)) | |||
print(definition.c_template.format(self=definition)) | |||
while current_requirements: | |||
pop_requirement() | |||
if __name__ == '__main__': | |||
print_header() |
@@ -0,0 +1,158 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
controls.py - support classes for LDAP controls | |||
See https://www.python-ldap.org/ for details. | |||
Description: | |||
The ldap.controls module provides LDAPControl classes. | |||
Each class provides support for a certain control. | |||
""" | |||
from ldap.pkginfo import __version__ | |||
import _ldap | |||
assert _ldap.__version__==__version__, \ | |||
ImportError('ldap %s and _ldap %s version mismatch!' % (__version__,_ldap.__version__)) | |||
import ldap | |||
from pyasn1.error import PyAsn1Error | |||
__all__ = [ | |||
'KNOWN_RESPONSE_CONTROLS', | |||
# Classes | |||
'AssertionControl', | |||
'BooleanControl', | |||
'LDAPControl', | |||
'ManageDSAITControl', | |||
'MatchedValuesControl', | |||
'RelaxRulesControl', | |||
'RequestControl', | |||
'ResponseControl', | |||
'SimplePagedResultsControl', | |||
'ValueLessRequestControl', | |||
# Functions | |||
'RequestControlTuples', | |||
'DecodeControlTuples', | |||
] | |||
# response control OID to class registry | |||
KNOWN_RESPONSE_CONTROLS = {} | |||
class RequestControl: | |||
""" | |||
Base class for all request controls | |||
controlType | |||
OID as string of the LDAPv3 extended request control | |||
criticality | |||
sets the criticality of the control (boolean) | |||
encodedControlValue | |||
control value of the LDAPv3 extended request control | |||
(here it is the BER-encoded ASN.1 control value) | |||
""" | |||
def __init__(self,controlType=None,criticality=False,encodedControlValue=None): | |||
self.controlType = controlType | |||
self.criticality = criticality | |||
self.encodedControlValue = encodedControlValue | |||
def encodeControlValue(self): | |||
""" | |||
sets class attribute encodedControlValue to the BER-encoded ASN.1 | |||
control value composed by class attributes set before | |||
""" | |||
return self.encodedControlValue | |||
class ResponseControl: | |||
""" | |||
Base class for all response controls | |||
controlType | |||
OID as string of the LDAPv3 extended response control | |||
criticality | |||
sets the criticality of the received control (boolean) | |||
""" | |||
def __init__(self,controlType=None,criticality=False): | |||
self.controlType = controlType | |||
self.criticality = criticality | |||
def decodeControlValue(self,encodedControlValue): | |||
""" | |||
decodes the BER-encoded ASN.1 control value and sets the appropriate | |||
class attributes | |||
""" | |||
self.encodedControlValue = encodedControlValue | |||
class LDAPControl(RequestControl,ResponseControl): | |||
""" | |||
Base class for combined request/response controls mainly | |||
for backward-compatibility to python-ldap 2.3.x | |||
""" | |||
def __init__(self,controlType=None,criticality=False,controlValue=None,encodedControlValue=None): | |||
self.controlType = controlType | |||
self.criticality = criticality | |||
self.controlValue = controlValue | |||
self.encodedControlValue = encodedControlValue | |||
def RequestControlTuples(ldapControls): | |||
""" | |||
Return list of readily encoded 3-tuples which can be directly | |||
passed to C module _ldap | |||
ldapControls | |||
sequence-type of RequestControl objects | |||
""" | |||
if ldapControls is None: | |||
return None | |||
else: | |||
result = [ | |||
(c.controlType,c.criticality,c.encodeControlValue()) | |||
for c in ldapControls | |||
] | |||
return result | |||
def DecodeControlTuples(ldapControlTuples,knownLDAPControls=None): | |||
""" | |||
Returns list of readily decoded ResponseControl objects | |||
ldapControlTuples | |||
Sequence-type of 3-tuples returned by _ldap.result4() containing | |||
the encoded ASN.1 control values of response controls. | |||
knownLDAPControls | |||
Dictionary mapping extended control's OID to ResponseControl class | |||
of response controls known by the application. If None | |||
ldap.controls.KNOWN_RESPONSE_CONTROLS is used here. | |||
""" | |||
knownLDAPControls = knownLDAPControls or KNOWN_RESPONSE_CONTROLS | |||
result = [] | |||
for controlType,criticality,encodedControlValue in ldapControlTuples or []: | |||
try: | |||
control = knownLDAPControls[controlType]() | |||
except KeyError: | |||
if criticality: | |||
raise ldap.UNAVAILABLE_CRITICAL_EXTENSION('Received unexpected critical response control with controlType %s' % (repr(controlType))) | |||
else: | |||
control.controlType,control.criticality = controlType,criticality | |||
try: | |||
control.decodeControlValue(encodedControlValue) | |||
except PyAsn1Error: | |||
if criticality: | |||
raise | |||
else: | |||
result.append(control) | |||
return result | |||
# Import the standard sub-modules | |||
from ldap.controls.simple import * | |||
from ldap.controls.libldap import * |
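To illustrate how these request/response control classes are used end to end, a sketch of a paged search with SimplePagedResultsControl (re-exported here from ldap.controls.libldap); the server URI and base DN are placeholders and the directory is assumed to allow anonymous search:

import ldap
from ldap.controls import SimplePagedResultsControl

conn = ldap.initialize('ldap://localhost')                 # placeholder server URI
page_ctrl = SimplePagedResultsControl(criticality=True, size=50, cookie='')

msgid = conn.search_ext('dc=example,dc=com', ldap.SCOPE_SUBTREE,
                        '(objectClass=*)', serverctrls=[page_ctrl])
# result3() decodes response controls via DecodeControlTuples/KNOWN_RESPONSE_CONTROLS
rtype, rdata, rmsgid, serverctrls = conn.result3(msgid)
for ctrl in serverctrls:
    if ctrl.controlType == SimplePagedResultsControl.controlType:
        print('next-page cookie:', ctrl.cookie)            # empty cookie means last page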
@@ -0,0 +1,119 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.controls.deref - classes for the Dereference request/response control
(see https://tools.ietf.org/html/draft-masarati-ldap-deref)
See https://www.python-ldap.org/ for project details. | |||
""" | |||
__all__ = [ | |||
'DEREF_CONTROL_OID', | |||
'DereferenceControl', | |||
] | |||
import ldap.controls | |||
from ldap.controls import LDAPControl,KNOWN_RESPONSE_CONTROLS | |||
import pyasn1_modules.rfc2251 | |||
from pyasn1.type import namedtype,univ,tag | |||
from pyasn1.codec.ber import encoder,decoder | |||
from pyasn1_modules.rfc2251 import LDAPDN,AttributeDescription,AttributeDescriptionList,AttributeValue | |||
DEREF_CONTROL_OID = '1.3.6.1.4.1.4203.666.5.16' | |||
# Request types | |||
#--------------------------------------------------------------------------- | |||
# For compatibility with ASN.1 declaration in I-D | |||
AttributeList = AttributeDescriptionList | |||
class DerefSpec(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType( | |||
'derefAttr', | |||
AttributeDescription() | |||
), | |||
namedtype.NamedType( | |||
'attributes', | |||
AttributeList() | |||
), | |||
) | |||
class DerefSpecs(univ.SequenceOf): | |||
componentType = DerefSpec() | |||
# Response types | |||
#--------------------------------------------------------------------------- | |||
class AttributeValues(univ.SetOf): | |||
componentType = AttributeValue() | |||
class PartialAttribute(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('type', AttributeDescription()), | |||
namedtype.NamedType('vals', AttributeValues()), | |||
) | |||
class PartialAttributeList(univ.SequenceOf): | |||
componentType = PartialAttribute() | |||
tagSet = univ.Sequence.tagSet.tagImplicitly( | |||
tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,0) | |||
) | |||
class DerefRes(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('derefAttr', AttributeDescription()), | |||
namedtype.NamedType('derefVal', LDAPDN()), | |||
namedtype.OptionalNamedType('attrVals', PartialAttributeList()), | |||
) | |||
class DerefResultControlValue(univ.SequenceOf): | |||
componentType = DerefRes() | |||
class DereferenceControl(LDAPControl): | |||
controlType = DEREF_CONTROL_OID | |||
def __init__(self,criticality=False,derefSpecs=None): | |||
LDAPControl.__init__(self,self.controlType,criticality) | |||
self.derefSpecs = derefSpecs or {} | |||
def _derefSpecs(self): | |||
deref_specs = DerefSpecs() | |||
i = 0 | |||
for deref_attr,deref_attribute_names in self.derefSpecs.items(): | |||
deref_spec = DerefSpec() | |||
deref_attributes = AttributeList() | |||
for j in range(len(deref_attribute_names)): | |||
deref_attributes.setComponentByPosition(j,deref_attribute_names[j]) | |||
deref_spec.setComponentByName('derefAttr',AttributeDescription(deref_attr)) | |||
deref_spec.setComponentByName('attributes',deref_attributes) | |||
deref_specs.setComponentByPosition(i,deref_spec) | |||
i += 1 | |||
return deref_specs | |||
def encodeControlValue(self): | |||
return encoder.encode(self._derefSpecs()) | |||
def decodeControlValue(self,encodedControlValue): | |||
decodedValue,_ = decoder.decode(encodedControlValue,asn1Spec=DerefResultControlValue()) | |||
self.derefRes = {} | |||
for deref_res in decodedValue: | |||
deref_attr,deref_val,deref_vals = deref_res[0],deref_res[1],deref_res[2] | |||
partial_attrs_dict = { | |||
str(tv[0]): [str(v) for v in tv[1]] | |||
for tv in deref_vals or [] | |||
} | |||
try: | |||
self.derefRes[str(deref_attr)].append((str(deref_val),partial_attrs_dict)) | |||
except KeyError: | |||
self.derefRes[str(deref_attr)] = [(str(deref_val),partial_attrs_dict)] | |||
KNOWN_RESPONSE_CONTROLS[DereferenceControl.controlType] = DereferenceControl |
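# Illustrative usage sketch, not part of the original module: attach a
# DereferenceControl to a search so the server also returns selected
# attributes of the entries referenced by the 'member' attribute.
# Server URI, credentials and base DN below are hypothetical, and the
# per-entry control handling assumes result4() with add_ctrls=1 returns
# (dn, attrs, ctrls) triples.
if __name__ == '__main__':
    import ldap
    conn = ldap.initialize('ldap://ldap.example.com')
    conn.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
    deref_ctrl = DereferenceControl(
        criticality=False,
        derefSpecs={'member': ['uid', 'mail']},
    )
    msg_id = conn.search_ext(
        'dc=example,dc=com',
        ldap.SCOPE_SUBTREE,
        '(objectClass=groupOfNames)',
        serverctrls=[deref_ctrl],
    )
    rtype, rdata, rmsgid, srvctrls, _, _ = conn.result4(msg_id, all=1, add_ctrls=1)
    for dn, attrs, entry_ctrls in rdata:
        for ctrl in entry_ctrls:
            if isinstance(ctrl, DereferenceControl):
                # derefRes maps the dereferenced attribute to (DN, attrs) pairs
                print(dn, ctrl.derefRes)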
@@ -0,0 +1,82 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
controls.libldap - LDAP controls wrapper classes with en-/decoding done | |||
by OpenLDAP functions | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
from ldap.pkginfo import __version__ | |||
import _ldap | |||
assert _ldap.__version__==__version__, \ | |||
ImportError('ldap %s and _ldap %s version mismatch!' % (__version__,_ldap.__version__)) | |||
import ldap | |||
from ldap.controls import RequestControl,LDAPControl,KNOWN_RESPONSE_CONTROLS | |||
class AssertionControl(RequestControl): | |||
""" | |||
LDAP Assertion control, as defined in RFC 4528 | |||
filterstr | |||
LDAP filter string specifying which assertions have to match | |||
so that the server processes the operation | |||
""" | |||
controlType = ldap.CONTROL_ASSERT | |||
def __init__(self,criticality=True,filterstr='(objectClass=*)'): | |||
self.criticality = criticality | |||
self.filterstr = filterstr | |||
def encodeControlValue(self): | |||
return _ldap.encode_assertion_control(self.filterstr) | |||
KNOWN_RESPONSE_CONTROLS[ldap.CONTROL_ASSERT] = AssertionControl | |||
class MatchedValuesControl(RequestControl): | |||
""" | |||
LDAP Matched Values control, as defined in RFC 3876 | |||
filterstr | |||
LDAP filter string specifying which attribute values | |||
should be returned | |||
""" | |||
controlType = ldap.CONTROL_VALUESRETURNFILTER | |||
def __init__(self,criticality=False,filterstr='(objectClass=*)'): | |||
self.criticality = criticality | |||
self.filterstr = filterstr | |||
def encodeControlValue(self): | |||
return _ldap.encode_valuesreturnfilter_control(self.filterstr) | |||
KNOWN_RESPONSE_CONTROLS[ldap.CONTROL_VALUESRETURNFILTER] = MatchedValuesControl | |||
class SimplePagedResultsControl(LDAPControl): | |||
""" | |||
LDAP Control Extension for Simple Paged Results Manipulation | |||
size | |||
Page size requested (number of entries to be returned) | |||
cookie | |||
Cookie string received with last page | |||
""" | |||
controlType = ldap.CONTROL_PAGEDRESULTS | |||
def __init__(self,criticality=False,size=None,cookie=None): | |||
self.criticality = criticality | |||
self.size,self.cookie = size,cookie | |||
def encodeControlValue(self): | |||
return _ldap.encode_page_control(self.size,self.cookie) | |||
def decodeControlValue(self,encodedControlValue): | |||
self.size,self.cookie = _ldap.decode_page_control(encodedControlValue) | |||
KNOWN_RESPONSE_CONTROLS[ldap.CONTROL_PAGEDRESULTS] = SimplePagedResultsControl |
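# Illustrative usage sketch, not part of the original module: only apply a
# modification if the entry currently matches an assertion filter (RFC 4528).
# Connection parameters and the DN below are hypothetical.
if __name__ == '__main__':
    import ldap
    conn = ldap.initialize('ldap://ldap.example.com')
    conn.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
    assert_ctrl = AssertionControl(criticality=True, filterstr='(employeeType=active)')
    # The server rejects the modify if the assertion filter does not match.
    conn.modify_ext_s(
        'uid=jdoe,ou=people,dc=example,dc=com',
        [(ldap.MOD_REPLACE, 'mail', [b'jdoe@example.com'])],
        serverctrls=[assert_ctrl],
    )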
@@ -0,0 +1,82 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.controls.openldap - classes for OpenLDAP-specific controls | |||
See https://www.python-ldap.org/ for project details. | |||
""" | |||
import ldap.controls | |||
from ldap.controls import ValueLessRequestControl,ResponseControl | |||
from pyasn1.type import univ | |||
from pyasn1.codec.ber import decoder | |||
__all__ = [ | |||
'SearchNoOpControl', | |||
'SearchNoOpMixIn', | |||
] | |||
class SearchNoOpControl(ValueLessRequestControl,ResponseControl): | |||
""" | |||
  No-op control attached to search operations, implementing a form of
  count operation;
see https://www.openldap.org/its/index.cgi?findid=6598 | |||
""" | |||
controlType = '1.3.6.1.4.1.4203.666.5.18' | |||
def __init__(self,criticality=False): | |||
self.criticality = criticality | |||
class SearchNoOpControlValue(univ.Sequence): | |||
pass | |||
def decodeControlValue(self,encodedControlValue): | |||
decodedValue,_ = decoder.decode(encodedControlValue,asn1Spec=self.SearchNoOpControlValue()) | |||
self.resultCode = int(decodedValue[0]) | |||
self.numSearchResults = int(decodedValue[1]) | |||
self.numSearchContinuations = int(decodedValue[2]) | |||
ldap.controls.KNOWN_RESPONSE_CONTROLS[SearchNoOpControl.controlType] = SearchNoOpControl | |||
class SearchNoOpMixIn: | |||
""" | |||
Mix-in class to be used with class LDAPObject and friends. | |||
It adds a convenience method noop_search_st() to LDAPObject | |||
for easily using the no-op search control. | |||
""" | |||
def noop_search_st(self,base,scope=ldap.SCOPE_SUBTREE,filterstr='(objectClass=*)',timeout=-1): | |||
try: | |||
msg_id = self.search_ext( | |||
base, | |||
scope, | |||
filterstr=filterstr, | |||
attrlist=['1.1'], | |||
timeout=timeout, | |||
serverctrls=[SearchNoOpControl(criticality=True)], | |||
) | |||
_,_,_,search_response_ctrls = self.result3(msg_id,all=1,timeout=timeout) | |||
except ( | |||
ldap.TIMEOUT, | |||
ldap.TIMELIMIT_EXCEEDED, | |||
ldap.SIZELIMIT_EXCEEDED, | |||
ldap.ADMINLIMIT_EXCEEDED | |||
) as e: | |||
self.abandon(msg_id) | |||
raise e | |||
else: | |||
noop_srch_ctrl = [ | |||
c | |||
for c in search_response_ctrls | |||
if c.controlType==SearchNoOpControl.controlType | |||
] | |||
if noop_srch_ctrl: | |||
return noop_srch_ctrl[0].numSearchResults,noop_srch_ctrl[0].numSearchContinuations | |||
else: | |||
return (None,None) |
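# Illustrative usage sketch, not part of the original module: combine the
# mix-in with LDAPObject to count entries server-side without transferring
# them. Host name, credentials and search base are hypothetical.
if __name__ == '__main__':
    import ldap
    from ldap.ldapobject import LDAPObject

    class NoOpSearchLDAPObject(SearchNoOpMixIn, LDAPObject):
        pass

    conn = NoOpSearchLDAPObject('ldap://ldap.example.com')
    conn.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
    num_entries, num_refs = conn.noop_search_st(
        'dc=example,dc=com',
        filterstr='(objectClass=person)',
    )
    print('entries: %s, continuations: %s' % (num_entries, num_refs))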
@@ -0,0 +1,50 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.controls.paged - classes for Simple Paged control | |||
(see RFC 2696) | |||
See https://www.python-ldap.org/ for project details. | |||
""" | |||
__all__ = [ | |||
'SimplePagedResultsControl' | |||
] | |||
# Imports from python-ldap 2.4+ | |||
import ldap.controls | |||
from ldap.controls import RequestControl,ResponseControl,KNOWN_RESPONSE_CONTROLS | |||
# Imports from pyasn1 | |||
from pyasn1.type import tag,namedtype,univ,constraint | |||
from pyasn1.codec.ber import encoder,decoder | |||
from pyasn1_modules.rfc2251 import LDAPString | |||
class PagedResultsControlValue(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('size',univ.Integer()), | |||
namedtype.NamedType('cookie',LDAPString()), | |||
) | |||
class SimplePagedResultsControl(RequestControl,ResponseControl): | |||
controlType = '1.2.840.113556.1.4.319' | |||
def __init__(self,criticality=False,size=10,cookie=''): | |||
self.criticality = criticality | |||
self.size = size | |||
self.cookie = cookie or '' | |||
def encodeControlValue(self): | |||
pc = PagedResultsControlValue() | |||
pc.setComponentByName('size',univ.Integer(self.size)) | |||
pc.setComponentByName('cookie',LDAPString(self.cookie)) | |||
return encoder.encode(pc) | |||
def decodeControlValue(self,encodedControlValue): | |||
decodedValue,_ = decoder.decode(encodedControlValue,asn1Spec=PagedResultsControlValue()) | |||
self.size = int(decodedValue.getComponentByName('size')) | |||
self.cookie = bytes(decodedValue.getComponentByName('cookie')) | |||
KNOWN_RESPONSE_CONTROLS[SimplePagedResultsControl.controlType] = SimplePagedResultsControl |
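# Illustrative usage sketch, not part of the original module: page through a
# large search result 500 entries at a time. Connection details are hypothetical.
if __name__ == '__main__':
    import ldap
    conn = ldap.initialize('ldap://ldap.example.com')
    conn.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
    page_ctrl = SimplePagedResultsControl(criticality=True, size=500, cookie='')
    while True:
        msg_id = conn.search_ext(
            'dc=example,dc=com',
            ldap.SCOPE_SUBTREE,
            '(objectClass=person)',
            serverctrls=[page_ctrl],
        )
        rtype, rdata, rmsgid, serverctrls = conn.result3(msg_id)
        for dn, entry in rdata:
            print(dn)
        # Continue with the cookie returned by the server; stop when it is empty.
        resp_ctrls = [
            c for c in serverctrls
            if c.controlType == SimplePagedResultsControl.controlType
        ]
        if resp_ctrls and resp_ctrls[0].cookie:
            page_ctrl.cookie = resp_ctrls[0].cookie
        else:
            break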
@@ -0,0 +1,91 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.controls.ppolicy - classes for Password Policy controls | |||
(see https://tools.ietf.org/html/draft-behera-ldap-password-policy) | |||
See https://www.python-ldap.org/ for project details. | |||
""" | |||
__all__ = [ | |||
'PasswordPolicyControl' | |||
] | |||
# Imports from python-ldap 2.4+ | |||
from ldap.controls import ( | |||
ResponseControl, ValueLessRequestControl, KNOWN_RESPONSE_CONTROLS | |||
) | |||
# Imports from pyasn1 | |||
from pyasn1.type import tag,namedtype,namedval,univ,constraint | |||
from pyasn1.codec.der import decoder | |||
class PasswordPolicyWarning(univ.Choice): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('timeBeforeExpiration',univ.Integer().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatSimple,0) | |||
)), | |||
namedtype.NamedType('graceAuthNsRemaining',univ.Integer().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatSimple,1) | |||
)), | |||
) | |||
class PasswordPolicyError(univ.Enumerated): | |||
namedValues = namedval.NamedValues( | |||
('passwordExpired',0), | |||
('accountLocked',1), | |||
('changeAfterReset',2), | |||
('passwordModNotAllowed',3), | |||
('mustSupplyOldPassword',4), | |||
('insufficientPasswordQuality',5), | |||
('passwordTooShort',6), | |||
('passwordTooYoung',7), | |||
('passwordInHistory',8) | |||
) | |||
subtypeSpec = univ.Enumerated.subtypeSpec + constraint.SingleValueConstraint(0,1,2,3,4,5,6,7,8) | |||
class PasswordPolicyResponseValue(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.OptionalNamedType( | |||
'warning', | |||
PasswordPolicyWarning().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatSimple,0) | |||
), | |||
), | |||
namedtype.OptionalNamedType( | |||
'error',PasswordPolicyError().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatSimple,1) | |||
) | |||
), | |||
) | |||
class PasswordPolicyControl(ValueLessRequestControl,ResponseControl): | |||
controlType = '1.3.6.1.4.1.42.2.27.8.5.1' | |||
def __init__(self,criticality=False): | |||
self.criticality = criticality | |||
def decodeControlValue(self,encodedControlValue): | |||
ppolicyValue,_ = decoder.decode(encodedControlValue,asn1Spec=PasswordPolicyResponseValue()) | |||
self.timeBeforeExpiration = None | |||
self.graceAuthNsRemaining = None | |||
self.error = None | |||
warning = ppolicyValue.getComponentByName('warning') | |||
if warning.hasValue(): | |||
if 'timeBeforeExpiration' in warning: | |||
self.timeBeforeExpiration = int( | |||
warning.getComponentByName('timeBeforeExpiration')) | |||
if 'graceAuthNsRemaining' in warning: | |||
self.graceAuthNsRemaining = int( | |||
warning.getComponentByName('graceAuthNsRemaining')) | |||
error = ppolicyValue.getComponentByName('error') | |||
if error.hasValue(): | |||
self.error = int(error) | |||
KNOWN_RESPONSE_CONTROLS[PasswordPolicyControl.controlType] = PasswordPolicyControl |
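# Illustrative usage sketch, not part of the original module: request the
# password policy control during a simple bind and inspect expiry warnings.
# Credentials and server URI below are hypothetical.
if __name__ == '__main__':
    import ldap
    conn = ldap.initialize('ldap://ldap.example.com')
    rtype, rdata, rmsgid, resp_ctrls = conn.simple_bind_s(
        'uid=jdoe,ou=people,dc=example,dc=com',
        'secret',
        serverctrls=[PasswordPolicyControl()],
    )
    for ctrl in resp_ctrls:
        if isinstance(ctrl, PasswordPolicyControl):
            print('timeBeforeExpiration:', ctrl.timeBeforeExpiration)
            print('graceAuthNsRemaining:', ctrl.graceAuthNsRemaining)
            print('error:', ctrl.error)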
@@ -0,0 +1,130 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.controls.psearch - classes for Persistent Search Control | |||
(see https://tools.ietf.org/html/draft-ietf-ldapext-psearch) | |||
See https://www.python-ldap.org/ for project details. | |||
""" | |||
__all__ = [ | |||
'PersistentSearchControl', | |||
'EntryChangeNotificationControl', | |||
'CHANGE_TYPES_INT', | |||
'CHANGE_TYPES_STR', | |||
] | |||
# Imports from python-ldap 2.4+ | |||
import ldap.controls | |||
from ldap.controls import RequestControl,ResponseControl,KNOWN_RESPONSE_CONTROLS | |||
# Imports from pyasn1 | |||
from pyasn1.type import namedtype,namedval,univ,constraint | |||
from pyasn1.codec.ber import encoder,decoder | |||
from pyasn1_modules.rfc2251 import LDAPDN | |||
#--------------------------------------------------------------------------- | |||
# Constants and classes for Persistent Search Control | |||
#--------------------------------------------------------------------------- | |||
CHANGE_TYPES_INT = { | |||
'add':1, | |||
'delete':2, | |||
'modify':4, | |||
'modDN':8, | |||
} | |||
CHANGE_TYPES_STR = {v: k for k,v in CHANGE_TYPES_INT.items()} | |||
class PersistentSearchControl(RequestControl): | |||
""" | |||
Implements the request control for persistent search. | |||
changeTypes | |||
List of strings specifying the types of changes returned by the server. | |||
Setting to None requests all changes. | |||
changesOnly | |||
Boolean which indicates whether only changes are returned by the server. | |||
returnECs | |||
Boolean which indicates whether the server should return an | |||
Entry Change Notification response control | |||
""" | |||
class PersistentSearchControlValue(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('changeTypes',univ.Integer()), | |||
namedtype.NamedType('changesOnly',univ.Boolean()), | |||
namedtype.NamedType('returnECs',univ.Boolean()), | |||
) | |||
controlType = "2.16.840.1.113730.3.4.3" | |||
def __init__(self,criticality=True,changeTypes=None,changesOnly=False,returnECs=True): | |||
self.criticality,self.changesOnly,self.returnECs = \ | |||
criticality,changesOnly,returnECs | |||
self.changeTypes = changeTypes or CHANGE_TYPES_INT.values() | |||
def encodeControlValue(self): | |||
    if not isinstance(self.changeTypes, int):
# Assume a sequence type of integers to be OR-ed | |||
changeTypes_int = 0 | |||
for ct in self.changeTypes: | |||
changeTypes_int = changeTypes_int|CHANGE_TYPES_INT.get(ct,ct) | |||
self.changeTypes = changeTypes_int | |||
p = self.PersistentSearchControlValue() | |||
p.setComponentByName('changeTypes',univ.Integer(self.changeTypes)) | |||
p.setComponentByName('changesOnly',univ.Boolean(self.changesOnly)) | |||
p.setComponentByName('returnECs',univ.Boolean(self.returnECs)) | |||
return encoder.encode(p) | |||
class ChangeType(univ.Enumerated): | |||
namedValues = namedval.NamedValues( | |||
('add',1), | |||
('delete',2), | |||
('modify',4), | |||
('modDN',8), | |||
) | |||
subtypeSpec = univ.Enumerated.subtypeSpec + constraint.SingleValueConstraint(1,2,4,8) | |||
class EntryChangeNotificationValue(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('changeType',ChangeType()), | |||
namedtype.OptionalNamedType('previousDN', LDAPDN()), | |||
namedtype.OptionalNamedType('changeNumber',univ.Integer()), | |||
) | |||
class EntryChangeNotificationControl(ResponseControl): | |||
""" | |||
Implements the response control for persistent search. | |||
Class attributes with values extracted from the response control: | |||
changeType | |||
String indicating the type of change causing this result to be | |||
returned by the server | |||
previousDN | |||
Old DN of the entry in case of a modrdn change | |||
changeNumber | |||
A change serial number returned by the server (optional). | |||
""" | |||
controlType = "2.16.840.1.113730.3.4.7" | |||
def decodeControlValue(self,encodedControlValue): | |||
ecncValue,_ = decoder.decode(encodedControlValue,asn1Spec=EntryChangeNotificationValue()) | |||
self.changeType = int(ecncValue.getComponentByName('changeType')) | |||
previousDN = ecncValue.getComponentByName('previousDN') | |||
if previousDN.hasValue(): | |||
self.previousDN = str(previousDN) | |||
else: | |||
self.previousDN = None | |||
changeNumber = ecncValue.getComponentByName('changeNumber') | |||
if changeNumber.hasValue(): | |||
self.changeNumber = int(changeNumber) | |||
else: | |||
self.changeNumber = None | |||
return (self.changeType,self.previousDN,self.changeNumber) | |||
KNOWN_RESPONSE_CONTROLS[EntryChangeNotificationControl.controlType] = EntryChangeNotificationControl |
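# Illustrative usage sketch, not part of the original module: issue a
# persistent search and process change notifications as they arrive.
# Connection parameters are hypothetical, and the loop assumes result4()
# with add_ctrls=1 attaches decoded controls to each entry.
if __name__ == '__main__':
    import ldap
    conn = ldap.initialize('ldap://ldap.example.com')
    conn.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
    psc = PersistentSearchControl(
        criticality=True,
        changeTypes=['add', 'modify', 'delete', 'modDN'],
        changesOnly=True,
        returnECs=True,
    )
    msg_id = conn.search_ext(
        'dc=example,dc=com',
        ldap.SCOPE_SUBTREE,
        '(objectClass=*)',
        serverctrls=[psc],
    )
    while True:
        # all=0 returns entries one at a time as the server sends them.
        rtype, rdata, rmsgid, srvctrls, _, _ = conn.result4(msg_id, all=0, add_ctrls=1)
        for dn, attrs, entry_ctrls in rdata:
            for ctrl in entry_ctrls:
                if isinstance(ctrl, EntryChangeNotificationControl):
                    print(dn, CHANGE_TYPES_STR.get(ctrl.changeType), ctrl.previousDN)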
@@ -0,0 +1,40 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.controls.pwdpolicy - classes for Password Policy controls | |||
(see https://tools.ietf.org/html/draft-vchu-ldap-pwd-policy) | |||
See https://www.python-ldap.org/ for project details. | |||
""" | |||
__all__ = [ | |||
'PasswordExpiringControl', | |||
'PasswordExpiredControl', | |||
] | |||
# Imports from python-ldap 2.4+ | |||
import ldap.controls | |||
from ldap.controls import RequestControl,ResponseControl,ValueLessRequestControl,KNOWN_RESPONSE_CONTROLS | |||
class PasswordExpiringControl(ResponseControl): | |||
""" | |||
Indicates time in seconds when password will expire | |||
""" | |||
controlType = '2.16.840.1.113730.3.4.5' | |||
def decodeControlValue(self,encodedControlValue): | |||
self.gracePeriod = int(encodedControlValue) | |||
KNOWN_RESPONSE_CONTROLS[PasswordExpiringControl.controlType] = PasswordExpiringControl | |||
class PasswordExpiredControl(ResponseControl): | |||
""" | |||
Indicates that password is expired | |||
""" | |||
controlType = '2.16.840.1.113730.3.4.4' | |||
def decodeControlValue(self,encodedControlValue): | |||
    self.passwordExpired = encodedControlValue in (b'0', '0')
KNOWN_RESPONSE_CONTROLS[PasswordExpiredControl.controlType] = PasswordExpiredControl |
@@ -0,0 +1,88 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.controls.readentry - classes for the Read Entry controls | |||
(see RFC 4527) | |||
See https://www.python-ldap.org/ for project details. | |||
""" | |||
import ldap | |||
from pyasn1.codec.ber import encoder,decoder | |||
from ldap.controls import LDAPControl,KNOWN_RESPONSE_CONTROLS | |||
from pyasn1_modules.rfc2251 import AttributeDescriptionList,SearchResultEntry | |||
class ReadEntryControl(LDAPControl): | |||
""" | |||
Base class for read entry control described in RFC 4527 | |||
attrList | |||
list of attribute type names requested | |||
Class attributes with values extracted from the response control: | |||
dn | |||
string holding the distinguished name of the LDAP entry | |||
entry | |||
dictionary holding the LDAP entry | |||
""" | |||
def __init__(self,criticality=False,attrList=None): | |||
self.criticality,self.attrList,self.entry = criticality,attrList or [],None | |||
def encodeControlValue(self): | |||
attributeSelection = AttributeDescriptionList() | |||
for i in range(len(self.attrList)): | |||
attributeSelection.setComponentByPosition(i,self.attrList[i]) | |||
return encoder.encode(attributeSelection) | |||
def decodeControlValue(self,encodedControlValue): | |||
decodedEntry,_ = decoder.decode(encodedControlValue,asn1Spec=SearchResultEntry()) | |||
self.dn = str(decodedEntry[0]) | |||
self.entry = {} | |||
for attr in decodedEntry[1]: | |||
self.entry[str(attr[0])] = [ str(attr_value) for attr_value in attr[1] ] | |||
class PreReadControl(ReadEntryControl): | |||
""" | |||
Class for pre-read control described in RFC 4527 | |||
attrList | |||
list of attribute type names requested | |||
Class attributes with values extracted from the response control: | |||
dn | |||
string holding the distinguished name of the LDAP entry | |||
before the operation was done by the server | |||
entry | |||
dictionary holding the LDAP entry | |||
before the operation was done by the server | |||
""" | |||
controlType = ldap.CONTROL_PRE_READ | |||
KNOWN_RESPONSE_CONTROLS[PreReadControl.controlType] = PreReadControl | |||
class PostReadControl(ReadEntryControl): | |||
""" | |||
Class for post-read control described in RFC 4527 | |||
attrList | |||
list of attribute type names requested | |||
Class attributes with values extracted from the response control: | |||
dn | |||
string holding the distinguished name of the LDAP entry | |||
after the operation was done by the server | |||
entry | |||
dictionary holding the LDAP entry | |||
after the operation was done by the server | |||
""" | |||
controlType = ldap.CONTROL_POST_READ | |||
KNOWN_RESPONSE_CONTROLS[PostReadControl.controlType] = PostReadControl |
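# Illustrative usage sketch, not part of the original module: ask the server
# to return operational attributes of a newly added entry in the add response.
# DN, attributes and connection details are hypothetical.
if __name__ == '__main__':
    import ldap, ldap.modlist
    conn = ldap.initialize('ldap://ldap.example.com')
    conn.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
    pr_ctrl = PostReadControl(criticality=True, attrList=['entryUUID', 'createTimestamp'])
    entry = {'objectClass': [b'inetOrgPerson'], 'cn': [b'Jane Doe'], 'sn': [b'Doe']}
    msg_id = conn.add_ext(
        'cn=Jane Doe,ou=people,dc=example,dc=com',
        ldap.modlist.addModlist(entry),
        serverctrls=[pr_ctrl],
    )
    rtype, rdata, rmsgid, resp_ctrls = conn.result3(msg_id)
    for ctrl in resp_ctrls:
        if isinstance(ctrl, PostReadControl):
            print(ctrl.dn, ctrl.entry)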
@@ -0,0 +1,62 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.controls.sessiontrack - class for session tracking control | |||
(see draft-wahl-ldap-session) | |||
See https://www.python-ldap.org/ for project details. | |||
""" | |||
from ldap.controls import RequestControl | |||
from pyasn1.type import namedtype,univ | |||
from pyasn1.codec.ber import encoder | |||
from pyasn1_modules.rfc2251 import LDAPString,LDAPOID | |||
# OID constants | |||
SESSION_TRACKING_CONTROL_OID = "1.3.6.1.4.1.21008.108.63.1" | |||
SESSION_TRACKING_FORMAT_OID_RADIUS_ACCT_SESSION_ID = SESSION_TRACKING_CONTROL_OID+".1" | |||
SESSION_TRACKING_FORMAT_OID_RADIUS_ACCT_MULTI_SESSION_ID = SESSION_TRACKING_CONTROL_OID+".2" | |||
SESSION_TRACKING_FORMAT_OID_USERNAME = SESSION_TRACKING_CONTROL_OID+".3" | |||
class SessionTrackingControl(RequestControl): | |||
""" | |||
Class for Session Tracking Control | |||
Because criticality MUST be false for this control it cannot be set | |||
from the application. | |||
sessionSourceIp | |||
IP address of the request source as string | |||
sessionSourceName | |||
Name of the request source as string | |||
formatOID | |||
OID as string specifying the format | |||
sessionTrackingIdentifier | |||
String containing a specific tracking ID | |||
""" | |||
class SessionIdentifierControlValue(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('sessionSourceIp',LDAPString()), | |||
namedtype.NamedType('sessionSourceName',LDAPString()), | |||
namedtype.NamedType('formatOID',LDAPOID()), | |||
namedtype.NamedType('sessionTrackingIdentifier',LDAPString()), | |||
) | |||
controlType = SESSION_TRACKING_CONTROL_OID | |||
def __init__(self,sessionSourceIp,sessionSourceName,formatOID,sessionTrackingIdentifier): | |||
# criticality MUST be false for this control | |||
self.criticality = False | |||
self.sessionSourceIp,self.sessionSourceName,self.formatOID,self.sessionTrackingIdentifier = \ | |||
sessionSourceIp,sessionSourceName,formatOID,sessionTrackingIdentifier | |||
def encodeControlValue(self): | |||
s = self.SessionIdentifierControlValue() | |||
s.setComponentByName('sessionSourceIp',LDAPString(self.sessionSourceIp)) | |||
s.setComponentByName('sessionSourceName',LDAPString(self.sessionSourceName)) | |||
s.setComponentByName('formatOID',LDAPOID(self.formatOID)) | |||
s.setComponentByName('sessionTrackingIdentifier',LDAPString(self.sessionTrackingIdentifier)) | |||
return encoder.encode(s) |
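# Illustrative usage sketch, not part of the original module: tag a search
# request with session tracking information so server-side logs can correlate
# it with the application user. All values below are hypothetical.
if __name__ == '__main__':
    import ldap
    conn = ldap.initialize('ldap://ldap.example.com')
    conn.simple_bind_s('cn=service,dc=example,dc=com', 'secret')
    st_ctrl = SessionTrackingControl(
        '192.0.2.10',                           # source IP of the end user
        'app01.example.com',                    # source host name
        SESSION_TRACKING_FORMAT_OID_USERNAME,   # identifier format
        'jdoe',                                 # tracking identifier
    )
    conn.search_ext_s(
        'dc=example,dc=com',
        ldap.SCOPE_SUBTREE,
        '(uid=jdoe)',
        serverctrls=[st_ctrl],
    )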
@@ -0,0 +1,145 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.controls.simple - classes for some very simple LDAP controls | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
import struct,ldap | |||
from ldap.controls import RequestControl,ResponseControl,LDAPControl,KNOWN_RESPONSE_CONTROLS | |||
class ValueLessRequestControl(RequestControl): | |||
""" | |||
Base class for controls without a controlValue. | |||
The presence of the control in a LDAPv3 request changes the server's | |||
behaviour when processing the request simply based on the controlType. | |||
controlType | |||
OID of the request control | |||
criticality | |||
    criticality of the request control
""" | |||
def __init__(self,controlType=None,criticality=False): | |||
self.controlType = controlType | |||
self.criticality = criticality | |||
def encodeControlValue(self): | |||
return None | |||
class OctetStringInteger(LDAPControl): | |||
""" | |||
  Base class with controlValue holding an unsigned integer value
integerValue | |||
Integer to be sent as OctetString | |||
""" | |||
def __init__(self,controlType=None,criticality=False,integerValue=None): | |||
self.controlType = controlType | |||
self.criticality = criticality | |||
self.integerValue = integerValue | |||
def encodeControlValue(self): | |||
return struct.pack('!Q',self.integerValue) | |||
def decodeControlValue(self,encodedControlValue): | |||
self.integerValue = struct.unpack('!Q',encodedControlValue)[0] | |||
class BooleanControl(LDAPControl): | |||
""" | |||
Base class for simple request controls with boolean control value. | |||
Constructor argument and class attribute: | |||
booleanValue | |||
Boolean (True/False or 1/0) which is the boolean controlValue. | |||
""" | |||
  boolean2ber = { 1:b'\x01\x01\xFF', 0:b'\x01\x01\x00' }
  ber2boolean = { b'\x01\x01\xFF':1, b'\x01\x01\x00':0 }
def __init__(self,controlType=None,criticality=False,booleanValue=False): | |||
self.controlType = controlType | |||
self.criticality = criticality | |||
self.booleanValue = booleanValue | |||
def encodeControlValue(self): | |||
return self.boolean2ber[int(self.booleanValue)] | |||
def decodeControlValue(self,encodedControlValue): | |||
self.booleanValue = self.ber2boolean[encodedControlValue] | |||
class ManageDSAITControl(ValueLessRequestControl): | |||
""" | |||
Manage DSA IT Control | |||
""" | |||
def __init__(self,criticality=False): | |||
ValueLessRequestControl.__init__(self,ldap.CONTROL_MANAGEDSAIT,criticality=False) | |||
KNOWN_RESPONSE_CONTROLS[ldap.CONTROL_MANAGEDSAIT] = ManageDSAITControl | |||
class RelaxRulesControl(ValueLessRequestControl): | |||
""" | |||
Relax Rules Control | |||
""" | |||
def __init__(self,criticality=False): | |||
ValueLessRequestControl.__init__(self,ldap.CONTROL_RELAX,criticality=False) | |||
KNOWN_RESPONSE_CONTROLS[ldap.CONTROL_RELAX] = RelaxRulesControl | |||
class ProxyAuthzControl(RequestControl): | |||
""" | |||
Proxy Authorization Control | |||
authzId | |||
string containing the authorization ID indicating the identity | |||
on behalf which the server should process the request | |||
""" | |||
def __init__(self,criticality,authzId): | |||
RequestControl.__init__(self,ldap.CONTROL_PROXY_AUTHZ,criticality,authzId) | |||
class AuthorizationIdentityRequestControl(ValueLessRequestControl): | |||
""" | |||
Authorization Identity Request and Response Controls | |||
""" | |||
controlType = '2.16.840.1.113730.3.4.16' | |||
def __init__(self,criticality): | |||
ValueLessRequestControl.__init__(self,self.controlType,criticality) | |||
class AuthorizationIdentityResponseControl(ResponseControl): | |||
""" | |||
Authorization Identity Request and Response Controls | |||
Class attributes: | |||
authzId | |||
decoded authorization identity | |||
""" | |||
controlType = '2.16.840.1.113730.3.4.15' | |||
def decodeControlValue(self,encodedControlValue): | |||
self.authzId = encodedControlValue | |||
KNOWN_RESPONSE_CONTROLS[AuthorizationIdentityResponseControl.controlType] = AuthorizationIdentityResponseControl | |||
class GetEffectiveRightsControl(RequestControl): | |||
""" | |||
Get Effective Rights Control | |||
""" | |||
def __init__(self,criticality,authzId=None): | |||
RequestControl.__init__(self,'1.3.6.1.4.1.42.2.27.9.5.2',criticality,authzId) |
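# Illustrative usage sketch, not part of the original module: read an entry
# holding a referral without chasing it, by sending the Manage DSA IT control.
# Connection parameters and the DN are hypothetical.
if __name__ == '__main__':
    conn = ldap.initialize('ldap://ldap.example.com')
    conn.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
    result = conn.search_ext_s(
        'cn=subordinate,dc=example,dc=com',
        ldap.SCOPE_BASE,
        '(objectClass=*)',
        serverctrls=[ManageDSAITControl()],
    )
    print(result)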
@@ -0,0 +1,133 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.controls.sss - classes for Server Side Sorting | |||
(see RFC 2891) | |||
See https://www.python-ldap.org/ for project details. | |||
""" | |||
__all__ = [ | |||
'SSSRequestControl', | |||
'SSSResponseControl', | |||
] | |||
import ldap | |||
from ldap.ldapobject import LDAPObject | |||
from ldap.controls import (RequestControl, ResponseControl, | |||
KNOWN_RESPONSE_CONTROLS, DecodeControlTuples) | |||
from pyasn1.type import univ, namedtype, tag, namedval, constraint | |||
from pyasn1.codec.ber import encoder, decoder | |||
# SortKeyList ::= SEQUENCE OF SEQUENCE { | |||
# attributeType AttributeDescription, | |||
# orderingRule [0] MatchingRuleId OPTIONAL, | |||
# reverseOrder [1] BOOLEAN DEFAULT FALSE } | |||
class SortKeyType(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('attributeType', univ.OctetString()), | |||
namedtype.OptionalNamedType('orderingRule', | |||
univ.OctetString().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0) | |||
) | |||
), | |||
namedtype.DefaultedNamedType('reverseOrder', univ.Boolean(False).subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1)))) | |||
class SortKeyListType(univ.SequenceOf): | |||
componentType = SortKeyType() | |||
class SSSRequestControl(RequestControl): | |||
'''Order result server side | |||
>>> s = SSSRequestControl(ordering_rules=['-cn']) | |||
''' | |||
controlType = '1.2.840.113556.1.4.473' | |||
def __init__( | |||
self, | |||
criticality=False, | |||
ordering_rules=None, | |||
): | |||
RequestControl.__init__(self,self.controlType,criticality) | |||
self.ordering_rules = ordering_rules | |||
        if isinstance(ordering_rules, str):
ordering_rules = [ordering_rules] | |||
for rule in ordering_rules: | |||
rule = rule.split(':') | |||
assert len(rule) < 3, 'syntax for ordering rule: [-]<attribute-type>[:ordering-rule]' | |||
def asn1(self): | |||
p = SortKeyListType() | |||
for i, rule in enumerate(self.ordering_rules): | |||
q = SortKeyType() | |||
reverse_order = rule.startswith('-') | |||
if reverse_order: | |||
rule = rule[1:] | |||
if ':' in rule: | |||
attribute_type, ordering_rule = rule.split(':') | |||
else: | |||
attribute_type, ordering_rule = rule, None | |||
q.setComponentByName('attributeType', attribute_type) | |||
if ordering_rule: | |||
q.setComponentByName('orderingRule', ordering_rule) | |||
if reverse_order: | |||
q.setComponentByName('reverseOrder', 1) | |||
p.setComponentByPosition(i, q) | |||
return p | |||
def encodeControlValue(self): | |||
return encoder.encode(self.asn1()) | |||
class SortResultType(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('sortResult', univ.Enumerated().subtype( | |||
namedValues=namedval.NamedValues( | |||
('success', 0), | |||
('operationsError', 1), | |||
('timeLimitExceeded', 3), | |||
('strongAuthRequired', 8), | |||
('adminLimitExceeded', 11), | |||
('noSuchAttribute', 16), | |||
('inappropriateMatching', 18), | |||
('insufficientAccessRights', 50), | |||
('busy', 51), | |||
('unwillingToPerform', 53), | |||
('other', 80)), | |||
subtypeSpec=univ.Enumerated.subtypeSpec + constraint.SingleValueConstraint( | |||
0, 1, 3, 8, 11, 16, 18, 50, 51, 53, 80))), | |||
namedtype.OptionalNamedType('attributeType', | |||
univ.OctetString().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0) | |||
) | |||
)) | |||
class SSSResponseControl(ResponseControl): | |||
controlType = '1.2.840.113556.1.4.474' | |||
def __init__(self,criticality=False): | |||
ResponseControl.__init__(self,self.controlType,criticality) | |||
def decodeControlValue(self, encoded): | |||
p, rest = decoder.decode(encoded, asn1Spec=SortResultType()) | |||
assert not rest, 'all data could not be decoded' | |||
sort_result = p.getComponentByName('sortResult') | |||
self.sortResult = int(sort_result) | |||
attribute_type = p.getComponentByName('attributeType') | |||
if attribute_type.hasValue(): | |||
self.attributeType = attribute_type | |||
else: | |||
self.attributeType = None | |||
# backward compatibility class attributes | |||
self.result = self.sortResult | |||
self.attribute_type_error = self.attributeType | |||
KNOWN_RESPONSE_CONTROLS[SSSResponseControl.controlType] = SSSResponseControl |
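# Illustrative usage sketch, not part of the original module: ask the server
# to sort results by surname and descending given name (RFC 2891). Connection
# details and the search base are hypothetical.
if __name__ == '__main__':
    conn = ldap.initialize('ldap://ldap.example.com')
    conn.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
    sss_ctrl = SSSRequestControl(criticality=False, ordering_rules=['sn', '-givenName'])
    msg_id = conn.search_ext(
        'ou=people,dc=example,dc=com',
        ldap.SCOPE_ONELEVEL,
        '(objectClass=person)',
        serverctrls=[sss_ctrl],
    )
    rtype, rdata, rmsgid, serverctrls = conn.result3(msg_id)
    for ctrl in serverctrls:
        if isinstance(ctrl, SSSResponseControl):
            print('sortResult:', ctrl.sortResult)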
@@ -0,0 +1,143 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.controls.vlv - classes for Virtual List View | |||
(see draft-ietf-ldapext-ldapv3-vlv) | |||
See https://www.python-ldap.org/ for project details. | |||
""" | |||
__all__ = [ | |||
'VLVRequestControl', | |||
'VLVResponseControl', | |||
] | |||
import ldap | |||
from ldap.ldapobject import LDAPObject | |||
from ldap.controls import (RequestControl, ResponseControl, | |||
KNOWN_RESPONSE_CONTROLS, DecodeControlTuples) | |||
from pyasn1.type import univ, namedtype, tag, namedval, constraint | |||
from pyasn1.codec.ber import encoder, decoder | |||
class ByOffsetType(univ.Sequence): | |||
tagSet = univ.Sequence.tagSet.tagImplicitly( | |||
tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)) | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('offset', univ.Integer()), | |||
namedtype.NamedType('contentCount', univ.Integer())) | |||
class TargetType(univ.Choice): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('byOffset', ByOffsetType()), | |||
namedtype.NamedType('greaterThanOrEqual', univ.OctetString().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext, | |||
tag.tagFormatSimple, 1)))) | |||
class VirtualListViewRequestType(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('beforeCount', univ.Integer()), | |||
namedtype.NamedType('afterCount', univ.Integer()), | |||
namedtype.NamedType('target', TargetType()), | |||
namedtype.OptionalNamedType('contextID', univ.OctetString())) | |||
class VLVRequestControl(RequestControl): | |||
controlType = '2.16.840.1.113730.3.4.9' | |||
def __init__( | |||
self, | |||
criticality=False, | |||
before_count=0, | |||
after_count=0, | |||
offset=None, | |||
content_count=None, | |||
greater_than_or_equal=None, | |||
context_id=None, | |||
): | |||
RequestControl.__init__(self,self.controlType,criticality) | |||
assert (offset is not None and content_count is not None) or \ | |||
greater_than_or_equal, \ | |||
ValueError( | |||
'offset and content_count must be set together or greater_than_or_equal must be used' | |||
) | |||
self.before_count = before_count | |||
self.after_count = after_count | |||
self.offset = offset | |||
self.content_count = content_count | |||
self.greater_than_or_equal = greater_than_or_equal | |||
self.context_id = context_id | |||
def encodeControlValue(self): | |||
p = VirtualListViewRequestType() | |||
p.setComponentByName('beforeCount', self.before_count) | |||
p.setComponentByName('afterCount', self.after_count) | |||
if self.offset is not None and self.content_count is not None: | |||
by_offset = ByOffsetType() | |||
by_offset.setComponentByName('offset', self.offset) | |||
by_offset.setComponentByName('contentCount', self.content_count) | |||
target = TargetType() | |||
target.setComponentByName('byOffset', by_offset) | |||
elif self.greater_than_or_equal: | |||
target = TargetType() | |||
target.setComponentByName('greaterThanOrEqual', | |||
self.greater_than_or_equal) | |||
else: | |||
raise NotImplementedError | |||
p.setComponentByName('target', target) | |||
return encoder.encode(p) | |||
KNOWN_RESPONSE_CONTROLS[VLVRequestControl.controlType] = VLVRequestControl | |||
class VirtualListViewResultType(univ.Enumerated): | |||
namedValues = namedval.NamedValues( | |||
('success', 0), | |||
('operationsError', 1), | |||
('protocolError', 3), | |||
('unwillingToPerform', 53), | |||
('insufficientAccessRights', 50), | |||
('adminLimitExceeded', 11), | |||
           ('inappropriateMatching', 18),
('sortControlMissing', 60), | |||
('offsetRangeError', 61), | |||
('other', 80), | |||
) | |||
class VirtualListViewResponseType(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('targetPosition', univ.Integer()), | |||
namedtype.NamedType('contentCount', univ.Integer()), | |||
namedtype.NamedType('virtualListViewResult', | |||
VirtualListViewResultType()), | |||
namedtype.OptionalNamedType('contextID', univ.OctetString())) | |||
class VLVResponseControl(ResponseControl): | |||
controlType = '2.16.840.1.113730.3.4.10' | |||
def __init__(self,criticality=False): | |||
ResponseControl.__init__(self,self.controlType,criticality) | |||
def decodeControlValue(self,encoded): | |||
p, rest = decoder.decode(encoded, asn1Spec=VirtualListViewResponseType()) | |||
assert not rest, 'all data could not be decoded' | |||
self.targetPosition = int(p.getComponentByName('targetPosition')) | |||
self.contentCount = int(p.getComponentByName('contentCount')) | |||
virtual_list_view_result = p.getComponentByName('virtualListViewResult') | |||
self.virtualListViewResult = int(virtual_list_view_result) | |||
context_id = p.getComponentByName('contextID') | |||
if context_id.hasValue(): | |||
self.contextID = str(context_id) | |||
else: | |||
self.contextID = None | |||
# backward compatibility class attributes | |||
self.target_position = self.targetPosition | |||
self.content_count = self.contentCount | |||
self.result = self.virtualListViewResult | |||
self.context_id = self.contextID | |||
KNOWN_RESPONSE_CONTROLS[VLVResponseControl.controlType] = VLVResponseControl |
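# Illustrative usage sketch, not part of the original module: fetch a window of
# 20 entries starting at offset 1 of a server-side sorted list. VLV requires a
# Server Side Sorting control in the same request; all names are hypothetical.
if __name__ == '__main__':
    from ldap.controls.sss import SSSRequestControl
    conn = ldap.initialize('ldap://ldap.example.com')
    conn.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
    sss_ctrl = SSSRequestControl(criticality=True, ordering_rules=['cn'])
    vlv_ctrl = VLVRequestControl(
        criticality=True,
        before_count=0,
        after_count=19,
        offset=1,
        content_count=0,
    )
    msg_id = conn.search_ext(
        'ou=people,dc=example,dc=com',
        ldap.SCOPE_ONELEVEL,
        '(objectClass=person)',
        serverctrls=[sss_ctrl, vlv_ctrl],
    )
    rtype, rdata, rmsgid, serverctrls = conn.result3(msg_id)
    for ctrl in serverctrls:
        if isinstance(ctrl, VLVResponseControl):
            print('targetPosition:', ctrl.targetPosition, 'contentCount:', ctrl.contentCount)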
@@ -0,0 +1,122 @@ | |||
""" | |||
dn.py - misc stuff for handling distinguished names (see RFC 4514) | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
import sys | |||
from ldap.pkginfo import __version__ | |||
import _ldap | |||
assert _ldap.__version__==__version__, \ | |||
ImportError('ldap %s and _ldap %s version mismatch!' % (__version__,_ldap.__version__)) | |||
import ldap.functions | |||
def escape_dn_chars(s): | |||
""" | |||
Escape all DN special characters found in s | |||
with a back-slash (see RFC 4514, section 2.4) | |||
""" | |||
if s: | |||
s = s.replace('\\','\\\\') | |||
s = s.replace(',' ,'\\,') | |||
s = s.replace('+' ,'\\+') | |||
s = s.replace('"' ,'\\"') | |||
s = s.replace('<' ,'\\<') | |||
s = s.replace('>' ,'\\>') | |||
s = s.replace(';' ,'\\;') | |||
s = s.replace('=' ,'\\=') | |||
s = s.replace('\000' ,'\\\000') | |||
if s[0]=='#' or s[0]==' ': | |||
s = ''.join(('\\',s)) | |||
if s[-1]==' ': | |||
s = ''.join((s[:-1],'\\ ')) | |||
return s | |||
def str2dn(dn,flags=0): | |||
""" | |||
  This function takes a DN string as parameter and returns
  a decomposed DN. It is the inverse of dn2str().
flags describes the format of the dn | |||
See also the OpenLDAP man-page ldap_str2dn(3) | |||
""" | |||
if not dn: | |||
return [] | |||
if sys.version_info[0] < 3 and isinstance(dn, unicode): | |||
dn = dn.encode('utf-8') | |||
return ldap.functions._ldap_function_call(None,_ldap.str2dn,dn,flags) | |||
def dn2str(dn): | |||
""" | |||
This function takes a decomposed DN as parameter and returns | |||
a single string. It's the inverse to str2dn() but will always | |||
return a DN in LDAPv3 format compliant to RFC 4514. | |||
""" | |||
return ','.join([ | |||
'+'.join([ | |||
'='.join((atype,escape_dn_chars(avalue or ''))) | |||
for atype,avalue,dummy in rdn]) | |||
for rdn in dn | |||
]) | |||
def explode_dn(dn, notypes=False, flags=0): | |||
""" | |||
explode_dn(dn [, notypes=False [, flags=0]]) -> list | |||
This function takes a DN and breaks it up into its component parts. | |||
The notypes parameter is used to specify that only the component's | |||
attribute values be returned and not the attribute types. | |||
""" | |||
if not dn: | |||
return [] | |||
dn_decomp = str2dn(dn,flags) | |||
rdn_list = [] | |||
for rdn in dn_decomp: | |||
if notypes: | |||
rdn_list.append('+'.join([ | |||
escape_dn_chars(avalue or '') | |||
for atype,avalue,dummy in rdn | |||
])) | |||
else: | |||
rdn_list.append('+'.join([ | |||
'='.join((atype,escape_dn_chars(avalue or ''))) | |||
for atype,avalue,dummy in rdn | |||
])) | |||
return rdn_list | |||
def explode_rdn(rdn, notypes=False, flags=0): | |||
""" | |||
explode_rdn(rdn [, notypes=0 [, flags=0]]) -> list | |||
This function takes a RDN and breaks it up into its component parts | |||
if it is a multi-valued RDN. | |||
The notypes parameter is used to specify that only the component's | |||
attribute values be returned and not the attribute types. | |||
""" | |||
if not rdn: | |||
return [] | |||
rdn_decomp = str2dn(rdn,flags)[0] | |||
if notypes: | |||
return [avalue or '' for atype,avalue,dummy in rdn_decomp] | |||
else: | |||
return ['='.join((atype,escape_dn_chars(avalue or ''))) for atype,avalue,dummy in rdn_decomp] | |||
def is_dn(s,flags=0): | |||
""" | |||
  Returns True if `s' can be parsed by ldap.dn.str2dn() as a
  distinguished name (DN), otherwise False is returned.
""" | |||
try: | |||
str2dn(s,flags) | |||
except Exception: | |||
return False | |||
else: | |||
return True |
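# Illustrative usage sketch, not part of the original module: build and
# decompose DNs with the helpers above. The DN values are hypothetical.
if __name__ == '__main__':
    # Escaping protects special characters inside an attribute value.
    print(escape_dn_chars('Smith, John'))                  # -> Smith\, John
    decomposed = str2dn('cn=Smith\\, John+uid=jsmith,dc=example,dc=com')
    print(decomposed)
    # dn2str() is the inverse of str2dn() and re-escapes the values.
    print(dn2str(decomposed))
    print(explode_dn('cn=John Smith,dc=example,dc=com', notypes=True))
    print(is_dn('cn=John Smith,dc=example,dc=com'))        # -> True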
@@ -0,0 +1,67 @@ | |||
""" | |||
ldap.extop - support classes for LDAPv3 extended operations
See https://www.python-ldap.org/ for details. | |||
Description: | |||
The ldap.extop module provides base classes for LDAPv3 extended operations. | |||
Each class provides support for a certain extended operation request and | |||
response. | |||
""" | |||
from ldap import __version__ | |||
class ExtendedRequest: | |||
""" | |||
Generic base class for a LDAPv3 extended operation request | |||
requestName | |||
OID as string of the LDAPv3 extended operation request | |||
requestValue | |||
value of the LDAPv3 extended operation request | |||
(here it is the BER-encoded ASN.1 request value) | |||
""" | |||
def __init__(self,requestName,requestValue): | |||
self.requestName = requestName | |||
self.requestValue = requestValue | |||
def __repr__(self): | |||
return '%s(%s,%s)' % (self.__class__.__name__,self.requestName,self.requestValue) | |||
def encodedRequestValue(self): | |||
""" | |||
returns the BER-encoded ASN.1 request value composed by class attributes | |||
set before | |||
""" | |||
return self.requestValue | |||
class ExtendedResponse: | |||
""" | |||
Generic base class for a LDAPv3 extended operation response | |||
  responseName
OID as string of the LDAPv3 extended operation response | |||
encodedResponseValue | |||
BER-encoded ASN.1 value of the LDAPv3 extended operation response | |||
""" | |||
def __init__(self,responseName,encodedResponseValue): | |||
self.responseName = responseName | |||
self.responseValue = self.decodeResponseValue(encodedResponseValue) | |||
def __repr__(self): | |||
return '%s(%s,%s)' % (self.__class__.__name__,self.responseName,self.responseValue) | |||
def decodeResponseValue(self,value): | |||
""" | |||
decodes the BER-encoded ASN.1 extended operation response value and | |||
sets the appropriate class attributes | |||
""" | |||
return value | |||
# Import sub-modules | |||
from ldap.extop.dds import * |
@@ -0,0 +1,75 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.extop.dds - Classes for Dynamic Entries extended operations | |||
(see RFC 2589) | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
from ldap.extop import ExtendedRequest,ExtendedResponse | |||
# Imports from pyasn1 | |||
from pyasn1.type import namedtype,univ,tag | |||
from pyasn1.codec.der import encoder,decoder | |||
from pyasn1_modules.rfc2251 import LDAPDN | |||
class RefreshRequest(ExtendedRequest): | |||
requestName = '1.3.6.1.4.1.1466.101.119.1' | |||
defaultRequestTtl = 86400 | |||
class RefreshRequestValue(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType( | |||
'entryName', | |||
LDAPDN().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatSimple,0) | |||
) | |||
), | |||
namedtype.NamedType( | |||
'requestTtl', | |||
univ.Integer().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatSimple,1) | |||
) | |||
), | |||
) | |||
def __init__(self,requestName=None,entryName=None,requestTtl=None): | |||
self.entryName = entryName | |||
self.requestTtl = requestTtl or self.defaultRequestTtl | |||
def encodedRequestValue(self): | |||
p = self.RefreshRequestValue() | |||
p.setComponentByName( | |||
'entryName', | |||
LDAPDN(self.entryName).subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple,0) | |||
) | |||
) | |||
p.setComponentByName( | |||
'requestTtl', | |||
univ.Integer(self.requestTtl).subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatSimple,1) | |||
) | |||
) | |||
return encoder.encode(p) | |||
class RefreshResponse(ExtendedResponse): | |||
responseName = '1.3.6.1.4.1.1466.101.119.1' | |||
class RefreshResponseValue(univ.Sequence): | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType( | |||
'responseTtl', | |||
univ.Integer().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatSimple,1) | |||
) | |||
) | |||
) | |||
def decodeResponseValue(self,value): | |||
respValue,_ = decoder.decode(value,asn1Spec=self.RefreshResponseValue()) | |||
self.responseTtl = int(respValue.getComponentByName('responseTtl')) | |||
return self.responseTtl |
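# Illustrative usage sketch, not part of the original module: refresh the TTL
# of a dynamic entry (RFC 2589). Connection details and the DN are hypothetical,
# and extop_s() is assumed to accept extop_resp_class for decoding the response.
if __name__ == '__main__':
    import ldap
    conn = ldap.initialize('ldap://ldap.example.com')
    conn.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
    req = RefreshRequest(entryName='cn=dynamic,dc=example,dc=com', requestTtl=3600)
    resp = conn.extop_s(req, extop_resp_class=RefreshResponse)
    print('server granted TTL:', resp.responseTtl)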
@@ -0,0 +1,89 @@ | |||
""" | |||
filters.py - misc stuff for handling LDAP filter strings (see RFC2254) | |||
See https://www.python-ldap.org/ for details. | |||
Compatibility: | |||
- Tested with Python 2.0+ | |||
""" | |||
from ldap import __version__ | |||
from ldap.functions import strf_secs | |||
import time | |||
def escape_filter_chars(assertion_value,escape_mode=0): | |||
""" | |||
Replace all special characters found in assertion_value | |||
by quoted notation. | |||
escape_mode | |||
If 0 only special chars mentioned in RFC 4515 are escaped. | |||
If 1 all NON-ASCII chars are escaped. | |||
If 2 all chars are escaped. | |||
""" | |||
if escape_mode: | |||
r = [] | |||
if escape_mode==1: | |||
for c in assertion_value: | |||
if c < '0' or c > 'z' or c in "\\*()": | |||
c = "\\%02x" % ord(c) | |||
r.append(c) | |||
elif escape_mode==2: | |||
for c in assertion_value: | |||
r.append("\\%02x" % ord(c)) | |||
else: | |||
raise ValueError('escape_mode must be 0, 1 or 2.') | |||
s = ''.join(r) | |||
else: | |||
s = assertion_value.replace('\\', r'\5c') | |||
s = s.replace(r'*', r'\2a') | |||
s = s.replace(r'(', r'\28') | |||
s = s.replace(r')', r'\29') | |||
s = s.replace('\x00', r'\00') | |||
return s | |||
def filter_format(filter_template,assertion_values): | |||
""" | |||
filter_template | |||
String containing %s as placeholder for assertion values. | |||
assertion_values | |||
List or tuple of assertion values. Length must match | |||
count of %s in filter_template. | |||
""" | |||
return filter_template % tuple(escape_filter_chars(v) for v in assertion_values) | |||
def time_span_filter( | |||
filterstr='', | |||
from_timestamp=0, | |||
until_timestamp=None, | |||
delta_attr='modifyTimestamp', | |||
): | |||
""" | |||
    Returns a filter string that extends filterstr with a time-span assertion
    on delta_attr covering the range [from_timestamp, until_timestamp).
""" | |||
if until_timestamp is None: | |||
until_timestamp = time.time() | |||
if from_timestamp < 0: | |||
from_timestamp = until_timestamp + from_timestamp | |||
if from_timestamp > until_timestamp: | |||
raise ValueError('from_timestamp %r must not be greater than until_timestamp %r' % ( | |||
from_timestamp, until_timestamp | |||
)) | |||
return ( | |||
'(&' | |||
'{filterstr}' | |||
'({delta_attr}>={from_timestr})' | |||
'(!({delta_attr}>={until_timestr}))' | |||
')' | |||
).format( | |||
filterstr=filterstr, | |||
delta_attr=delta_attr, | |||
from_timestr=strf_secs(from_timestamp), | |||
until_timestr=strf_secs(until_timestamp), | |||
) | |||
# end of time_span_filter() |
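# Illustrative usage sketch, not part of the original module: always escape
# user-supplied values before interpolating them into a filter string.
if __name__ == '__main__':
    user_input = 'Smith) (uid=*'          # hostile value, hypothetical
    print(escape_filter_chars(user_input))
    print(filter_format('(&(objectClass=person)(sn=%s))', [user_input]))
    # Entries modified during the last hour:
    print(time_span_filter('(objectClass=person)', from_timestamp=-3600))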
@@ -0,0 +1,125 @@ | |||
""" | |||
functions.py - wraps functions of module _ldap | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
from ldap import __version__ | |||
__all__ = [ | |||
'open','initialize','init', | |||
'explode_dn','explode_rdn', | |||
'get_option','set_option', | |||
'escape_str', | |||
'strf_secs','strp_secs', | |||
] | |||
import sys,pprint,time,_ldap,ldap | |||
from calendar import timegm | |||
from ldap import LDAPError | |||
from ldap.dn import explode_dn,explode_rdn | |||
from ldap.ldapobject import LDAPObject | |||
if __debug__: | |||
# Tracing is only supported in debugging mode | |||
import traceback | |||
# See _raise_byteswarning in ldapobject.py | |||
_LDAP_WARN_SKIP_FRAME = True | |||
def _ldap_function_call(lock,func,*args,**kwargs): | |||
""" | |||
Wrapper function which locks and logs calls to function | |||
lock | |||
Instance of threading.Lock or compatible | |||
func | |||
Function to call with arguments passed in via *args and **kwargs | |||
""" | |||
if lock: | |||
lock.acquire() | |||
if __debug__: | |||
if ldap._trace_level>=1: | |||
ldap._trace_file.write('*** %s.%s %s\n' % ( | |||
'_ldap',func.__name__, | |||
pprint.pformat((args,kwargs)) | |||
)) | |||
if ldap._trace_level>=9: | |||
traceback.print_stack(limit=ldap._trace_stack_limit,file=ldap._trace_file) | |||
try: | |||
try: | |||
result = func(*args,**kwargs) | |||
finally: | |||
if lock: | |||
lock.release() | |||
except LDAPError as e: | |||
if __debug__ and ldap._trace_level>=2: | |||
ldap._trace_file.write('=> LDAPError: %s\n' % (str(e))) | |||
raise | |||
if __debug__ and ldap._trace_level>=2: | |||
ldap._trace_file.write('=> result:\n%s\n' % (pprint.pformat(result))) | |||
return result | |||
def initialize(uri,trace_level=0,trace_file=sys.stdout,trace_stack_limit=None, bytes_mode=None): | |||
""" | |||
Return LDAPObject instance by opening LDAP connection to | |||
LDAP host specified by LDAP URL | |||
Parameters: | |||
uri | |||
LDAP URL containing at least connection scheme and hostport, | |||
e.g. ldap://localhost:389 | |||
trace_level | |||
If non-zero a trace output of LDAP calls is generated. | |||
trace_file | |||
File object where to write the trace output to. | |||
Default is to use stdout. | |||
bytes_mode | |||
Whether to enable :ref:`bytes_mode` for backwards compatibility under Py2. | |||
""" | |||
return LDAPObject(uri,trace_level,trace_file,trace_stack_limit,bytes_mode) | |||
def get_option(option): | |||
""" | |||
get_option(name) -> value | |||
Get the value of an LDAP global option. | |||
""" | |||
return _ldap_function_call(None,_ldap.get_option,option) | |||
def set_option(option,invalue): | |||
""" | |||
set_option(name, value) | |||
Set the value of an LDAP global option. | |||
""" | |||
return _ldap_function_call(None,_ldap.set_option,option,invalue) | |||
def escape_str(escape_func,s,*args): | |||
""" | |||
Applies escape_func() to all items of `args' and returns a string based | |||
on format string `s'. | |||
""" | |||
return s % tuple(escape_func(v) for v in args) | |||
def strf_secs(secs): | |||
""" | |||
Convert seconds since epoch to a string compliant to LDAP syntax GeneralizedTime | |||
""" | |||
return time.strftime('%Y%m%d%H%M%SZ', time.gmtime(secs)) | |||
def strp_secs(dt_str): | |||
""" | |||
Convert LDAP syntax GeneralizedTime to seconds since epoch | |||
""" | |||
return timegm(time.strptime(dt_str, '%Y%m%d%H%M%SZ')) |
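# Illustrative usage sketch, not part of the original module: convert between
# Unix timestamps and LDAP GeneralizedTime strings, and open a traced
# connection. The server URI is hypothetical.
if __name__ == '__main__':
    print(strf_secs(0))                      # -> 19700101000000Z
    print(strp_secs('19700101000000Z'))      # -> 0
    conn = initialize('ldap://ldap.example.com', trace_level=1)
    print(get_option(ldap.OPT_API_VERSION))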
@@ -0,0 +1,19 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
Helper class for using logging as trace file object | |||
""" | |||
import logging | |||
class logging_file_class(object): | |||
def __init__(self, logging_level): | |||
self._logging_level = logging_level | |||
def write(self, msg): | |||
logging.log(self._logging_level, msg[:-1]) | |||
def flush(self): | |||
return | |||
logging_file_obj = logging_file_class(logging.DEBUG) |
@@ -0,0 +1,97 @@ | |||
""" | |||
ldap.modlist - create add/modify modlists
See https://www.python-ldap.org/ for details. | |||
""" | |||
from ldap import __version__ | |||
import ldap | |||
def addModlist(entry,ignore_attr_types=None): | |||
"""Build modify list for call of method LDAPObject.add()""" | |||
ignore_attr_types = {v.lower() for v in ignore_attr_types or []} | |||
modlist = [] | |||
for attrtype, value in entry.items(): | |||
if attrtype.lower() in ignore_attr_types: | |||
# This attribute type is ignored | |||
continue | |||
# Eliminate empty attr value strings in list | |||
attrvaluelist = [item for item in value if item is not None] | |||
if attrvaluelist: | |||
modlist.append((attrtype, value)) | |||
return modlist # addModlist() | |||
def modifyModlist( | |||
old_entry,new_entry,ignore_attr_types=None,ignore_oldexistent=0,case_ignore_attr_types=None | |||
): | |||
""" | |||
Build differential modify list for calling LDAPObject.modify()/modify_s() | |||
old_entry | |||
Dictionary holding the old entry | |||
new_entry | |||
Dictionary holding what the new entry should be | |||
ignore_attr_types | |||
List of attribute type names to be ignored completely | |||
ignore_oldexistent | |||
If non-zero attribute type names which are in old_entry | |||
but are not found in new_entry at all are not deleted. | |||
This is handy for situations where your application | |||
sets attribute value to '' for deleting an attribute. | |||
In most cases leave zero. | |||
case_ignore_attr_types | |||
List of attribute type names for which comparison will be made | |||
case-insensitive | |||
""" | |||
ignore_attr_types = {v.lower() for v in ignore_attr_types or []} | |||
case_ignore_attr_types = {v.lower() for v in case_ignore_attr_types or []} | |||
modlist = [] | |||
attrtype_lower_map = {} | |||
for a in old_entry.keys(): | |||
attrtype_lower_map[a.lower()]=a | |||
for attrtype, value in new_entry.items(): | |||
attrtype_lower = attrtype.lower() | |||
if attrtype_lower in ignore_attr_types: | |||
# This attribute type is ignored | |||
continue | |||
# Filter away null-strings | |||
new_value = [item for item in value if item is not None] | |||
if attrtype_lower in attrtype_lower_map: | |||
old_value = old_entry.get(attrtype_lower_map[attrtype_lower],[]) | |||
old_value = [item for item in old_value if item is not None] | |||
del attrtype_lower_map[attrtype_lower] | |||
else: | |||
old_value = [] | |||
if not old_value and new_value: | |||
# Add a new attribute to entry | |||
modlist.append((ldap.MOD_ADD,attrtype,new_value)) | |||
elif old_value and new_value: | |||
# Replace existing attribute | |||
replace_attr_value = len(old_value)!=len(new_value) | |||
if not replace_attr_value: | |||
if attrtype_lower in case_ignore_attr_types: | |||
old_value_set = {v.lower() for v in old_value} | |||
new_value_set = {v.lower() for v in new_value} | |||
else: | |||
old_value_set = set(old_value) | |||
new_value_set = set(new_value) | |||
replace_attr_value = new_value_set != old_value_set | |||
if replace_attr_value: | |||
modlist.append((ldap.MOD_DELETE,attrtype,None)) | |||
modlist.append((ldap.MOD_ADD,attrtype,new_value)) | |||
elif old_value and not new_value: | |||
# Completely delete an existing attribute | |||
modlist.append((ldap.MOD_DELETE,attrtype,None)) | |||
if not ignore_oldexistent: | |||
# Remove all attributes of old_entry which are not present | |||
# in new_entry at all | |||
for a, val in attrtype_lower_map.items(): | |||
if a in ignore_attr_types: | |||
# This attribute type is ignored | |||
continue | |||
attrtype = val | |||
modlist.append((ldap.MOD_DELETE,attrtype,None)) | |||
return modlist # modifyModlist() |
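# Illustrative usage sketch, not part of the original module: build modify
# lists from plain dictionaries. Attribute values are bytes, as expected by
# LDAPObject.add_s()/modify_s() under Python 3; the entries are hypothetical.
if __name__ == '__main__':
    new_entry = {
        'objectClass': [b'inetOrgPerson'],
        'cn': [b'Jane Doe'],
        'sn': [b'Doe'],
        'mail': [b'jane@example.com'],
    }
    print(addModlist(new_entry))
    old = {'mail': [b'jane@example.com'], 'description': [b'temp']}
    new = {'mail': [b'jane@example.com', b'jdoe@example.com']}
    # Yields MOD_DELETE/MOD_ADD operations for 'mail' and a MOD_DELETE
    # for the vanished 'description' attribute.
    print(modifyModlist(old, new))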
@@ -0,0 +1,7 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
Meta attributes for packaging; this module does not import any dependencies.
""" | |||
__version__ = '3.1.0' | |||
__author__ = u'python-ldap project' | |||
__license__ = 'Python style' |
@@ -0,0 +1,41 @@ | |||
""" | |||
ldap.resiter - processing LDAP results with iterators | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
from ldap.pkginfo import __version__, __author__, __license__ | |||
class ResultProcessor: | |||
""" | |||
  Mix-in class used with ldap.ldapobject.LDAPObject or derived classes.
""" | |||
def allresults(self, msgid, timeout=-1, add_ctrls=0): | |||
""" | |||
Generator function which returns an iterator for processing all LDAP operation | |||
results of the given msgid like retrieved with LDAPObject.result3() -> 4-tuple | |||
""" | |||
result_type, result_list, result_msgid, result_serverctrls, _, _ = \ | |||
self.result4( | |||
msgid, | |||
0, | |||
timeout, | |||
add_ctrls=add_ctrls | |||
) | |||
while result_type and result_list: | |||
yield ( | |||
result_type, | |||
result_list, | |||
result_msgid, | |||
result_serverctrls | |||
) | |||
result_type, result_list, result_msgid, result_serverctrls, _, _ = \ | |||
self.result4( | |||
msgid, | |||
0, | |||
timeout, | |||
add_ctrls=add_ctrls | |||
) | |||
return # allresults() |
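# Illustrative usage sketch, not part of the original module: stream search
# results instead of collecting them in one large list. Connection parameters
# are hypothetical.
if __name__ == '__main__':
    import ldap
    import ldap.ldapobject

    class MyLDAPObject(ldap.ldapobject.LDAPObject, ResultProcessor):
        pass

    conn = MyLDAPObject('ldap://ldap.example.com')
    conn.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
    msg_id = conn.search('dc=example,dc=com', ldap.SCOPE_SUBTREE, '(objectClass=person)')
    for res_type, res_data, res_msgid, res_controls in conn.allresults(msg_id):
        for dn, entry in res_data:
            print(dn)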
@@ -0,0 +1,134 @@ | |||
""" | |||
sasl.py - support for SASL mechanism | |||
See https://www.python-ldap.org/ for details. | |||
Description: | |||
The ldap.sasl module provides SASL authentication classes. | |||
Each class provides support for one SASL mechanism. This is done by | |||
implementing a callback() - method, which will be called by the | |||
LDAPObject's sasl_bind_s() method | |||
Implementing support for a new SASL mechanism is very easy --- see
the examples of digest_md5 and gssapi. | |||
""" | |||
from ldap import __version__ | |||
if __debug__: | |||
# Tracing is only supported in debugging mode | |||
from ldap import _trace_level, _trace_file | |||
# These are the SASL callback id's , as defined in sasl.h | |||
CB_USER = 0x4001 | |||
CB_AUTHNAME = 0x4002 | |||
CB_LANGUAGE = 0x4003 | |||
CB_PASS = 0x4004 | |||
CB_ECHOPROMPT = 0x4005 | |||
CB_NOECHOPROMPT = 0x4006 | |||
CB_GETREALM = 0x4008 | |||
class sasl: | |||
""" | |||
This class handles SASL interactions for authentication. | |||
If an instance of this class is passed to ldap's sasl_bind_s() | |||
method, the library will call its callback() method. For | |||
specific SASL authentication mechanisms, this method can be | |||
overridden | |||
""" | |||
def __init__(self, cb_value_dict, mech): | |||
""" | |||
    The (generic) base class takes a cb_value_dict dictionary of
question-answer pairs. Questions are specified by the respective | |||
SASL callback id's. The mech argument is a string that specifies | |||
    the SASL mechanism to be used.
""" | |||
self.cb_value_dict = cb_value_dict or {} | |||
if not isinstance(mech, bytes): | |||
mech = mech.encode('utf-8') | |||
self.mech = mech | |||
def callback(self, cb_id, challenge, prompt, defresult): | |||
""" | |||
The callback method will be called by the sasl_bind_s() | |||
method several times. Each time it will provide the id, which | |||
tells us what kind of information is requested (the CB_* | |||
constants above). The challenge might be a short (English) text | |||
or some binary string, from which the return value is calculated. | |||
The prompt argument is always a human-readable description string; | |||
The defresult is a default value provided by the sasl library | |||
        Currently, we do not use the challenge and prompt information, and | |||
        return only information which is stored in the self.cb_value_dict | |||
        dictionary. Note that the current callback interface is not very | |||
useful for writing generic sasl GUIs, which would need to know all | |||
the questions to ask, before the answers are returned to the sasl | |||
lib (in contrast to one question at a time). | |||
Unicode strings are always converted to bytes. | |||
""" | |||
        # The following trace output might be useful for debugging | |||
        # new SASL mechanisms, so it is left here. | |||
cb_result = self.cb_value_dict.get(cb_id, defresult) or '' | |||
if __debug__: | |||
if _trace_level >= 1: | |||
_trace_file.write("*** id=%d, challenge=%s, prompt=%s, defresult=%s\n-> %s\n" % ( | |||
cb_id, | |||
challenge, | |||
prompt, | |||
repr(defresult), | |||
                    repr(cb_result) | |||
)) | |||
if not isinstance(cb_result, bytes): | |||
cb_result = cb_result.encode('utf-8') | |||
return cb_result | |||
class cram_md5(sasl): | |||
""" | |||
This class handles SASL CRAM-MD5 authentication. | |||
""" | |||
def __init__(self, authc_id, password, authz_id=""): | |||
auth_dict = { | |||
CB_AUTHNAME: authc_id, | |||
CB_PASS: password, | |||
CB_USER: authz_id, | |||
} | |||
sasl.__init__(self, auth_dict, "CRAM-MD5") | |||
class digest_md5(sasl): | |||
""" | |||
This class handles SASL DIGEST-MD5 authentication. | |||
""" | |||
def __init__(self, authc_id, password, authz_id=""): | |||
auth_dict = { | |||
CB_AUTHNAME: authc_id, | |||
CB_PASS: password, | |||
CB_USER: authz_id, | |||
} | |||
sasl.__init__(self, auth_dict, "DIGEST-MD5") | |||
class gssapi(sasl): | |||
""" | |||
This class handles SASL GSSAPI (i.e. Kerberos V) authentication. | |||
""" | |||
def __init__(self, authz_id=""): | |||
sasl.__init__(self, {CB_USER: authz_id}, "GSSAPI") | |||
class external(sasl): | |||
""" | |||
This class handles SASL EXTERNAL authentication | |||
(i.e. X.509 client certificate) | |||
""" | |||
def __init__(self, authz_id=""): | |||
sasl.__init__(self, {CB_USER: authz_id}, "EXTERNAL") |
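A short sketch of how these classes are passed to an LDAPObject via sasl_interactive_bind_s() (server URL and DIGEST-MD5 credentials are placeholders): | |||
import ldap | |||
import ldap.sasl | |||
conn = ldap.initialize('ldap://ldap.example.com')   # placeholder URL | |||
# DIGEST-MD5 with explicit credentials (placeholder user/password). | |||
auth = ldap.sasl.digest_md5('someuser', 'secret') | |||
conn.sasl_interactive_bind_s('', auth) | |||
# GSSAPI (Kerberos) would instead rely on an existing ticket cache: | |||
#   conn.sasl_interactive_bind_s('', ldap.sasl.gssapi()) | |||
print(conn.whoami_s()) | |||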
@@ -0,0 +1,10 @@ | |||
""" | |||
ldap.schema - LDAPv3 schema handling | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
from ldap import __version__ | |||
from ldap.schema.subentry import SubSchema,SCHEMA_ATTRS,SCHEMA_CLASS_MAPPING,SCHEMA_ATTR_MAPPING,urlfetch | |||
from ldap.schema.models import * |
@@ -0,0 +1,701 @@ | |||
""" | |||
schema.py - support for subSchemaSubEntry information | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
import sys | |||
import ldap.cidict | |||
from ldap.compat import IterableUserDict | |||
from ldap.schema.tokenizer import split_tokens,extract_tokens | |||
NOT_HUMAN_READABLE_LDAP_SYNTAXES = { | |||
'1.3.6.1.4.1.1466.115.121.1.4', # Audio | |||
'1.3.6.1.4.1.1466.115.121.1.5', # Binary | |||
'1.3.6.1.4.1.1466.115.121.1.8', # Certificate | |||
'1.3.6.1.4.1.1466.115.121.1.9', # Certificate List | |||
'1.3.6.1.4.1.1466.115.121.1.10', # Certificate Pair | |||
'1.3.6.1.4.1.1466.115.121.1.23', # G3 FAX | |||
'1.3.6.1.4.1.1466.115.121.1.28', # JPEG | |||
'1.3.6.1.4.1.1466.115.121.1.40', # Octet String | |||
'1.3.6.1.4.1.1466.115.121.1.49', # Supported Algorithm | |||
} | |||
class SchemaElement: | |||
""" | |||
Base class for all schema element classes. Not used directly! | |||
Arguments: | |||
schema_element_str | |||
String which contains the schema element description to be parsed. | |||
(Bytestrings are decoded using UTF-8) | |||
Class attributes: | |||
schema_attribute | |||
LDAP attribute type containing a certain schema element description | |||
token_defaults | |||
Dictionary internally used by the schema element parser | |||
containing the defaults for certain schema description key-words | |||
""" | |||
token_defaults = { | |||
'DESC':(None,), | |||
} | |||
def __init__(self,schema_element_str=None): | |||
if sys.version_info >= (3, 0) and isinstance(schema_element_str, bytes): | |||
schema_element_str = schema_element_str.decode('utf-8') | |||
if schema_element_str: | |||
l = split_tokens(schema_element_str) | |||
self.set_id(l[1]) | |||
d = extract_tokens(l,self.token_defaults) | |||
self._set_attrs(l,d) | |||
def _set_attrs(self,l,d): | |||
self.desc = d['DESC'][0] | |||
return | |||
def set_id(self,element_id): | |||
self.oid = element_id | |||
def get_id(self): | |||
return self.oid | |||
def key_attr(self,key,value,quoted=0): | |||
assert value is None or type(value)==str,TypeError("value has to be of str, was %r" % value) | |||
if value: | |||
if quoted: | |||
return " %s '%s'" % (key,value.replace("'","\\'")) | |||
else: | |||
return " %s %s" % (key,value) | |||
else: | |||
return "" | |||
def key_list(self,key,values,sep=' ',quoted=0): | |||
assert type(values)==tuple,TypeError("values has to be a tuple, was %r" % values) | |||
if not values: | |||
return '' | |||
if quoted: | |||
quoted_values = [ "'%s'" % value.replace("'","\\'") for value in values ] | |||
else: | |||
quoted_values = values | |||
if len(values)==1: | |||
return ' %s %s' % (key,quoted_values[0]) | |||
else: | |||
return ' %s ( %s )' % (key,sep.join(quoted_values)) | |||
def __str__(self): | |||
result = [str(self.oid)] | |||
result.append(self.key_attr('DESC',self.desc,quoted=1)) | |||
return '( %s )' % ''.join(result) | |||
class ObjectClass(SchemaElement): | |||
""" | |||
Arguments: | |||
schema_element_str | |||
String containing an ObjectClassDescription | |||
Class attributes: | |||
oid | |||
OID assigned to the object class | |||
names | |||
This list of strings contains all NAMEs of the object class | |||
desc | |||
This string contains description text (DESC) of the object class | |||
obsolete | |||
Integer flag (0 or 1) indicating whether the object class is marked | |||
as OBSOLETE in the schema | |||
must | |||
This list of strings contains NAMEs or OIDs of all attributes | |||
an entry of the object class must have | |||
may | |||
This list of strings contains NAMEs or OIDs of additional attributes | |||
an entry of the object class may have | |||
kind | |||
Kind of an object class: | |||
0 = STRUCTURAL, | |||
1 = ABSTRACT, | |||
2 = AUXILIARY | |||
sup | |||
This list of strings contains NAMEs or OIDs of object classes | |||
this object class is derived from | |||
""" | |||
schema_attribute = u'objectClasses' | |||
token_defaults = { | |||
'NAME':(()), | |||
'DESC':(None,), | |||
'OBSOLETE':None, | |||
'SUP':(()), | |||
'STRUCTURAL':None, | |||
'AUXILIARY':None, | |||
'ABSTRACT':None, | |||
'MUST':(()), | |||
'MAY':() | |||
} | |||
def _set_attrs(self,l,d): | |||
self.obsolete = d['OBSOLETE']!=None | |||
self.names = d['NAME'] | |||
self.desc = d['DESC'][0] | |||
self.must = d['MUST'] | |||
self.may = d['MAY'] | |||
    # Default is STRUCTURAL, see RFC 2252 or draft-ietf-ldapbis-syntaxes | |||
self.kind = 0 | |||
if d['ABSTRACT']!=None: | |||
self.kind = 1 | |||
elif d['AUXILIARY']!=None: | |||
self.kind = 2 | |||
if self.kind==0 and not d['SUP'] and self.oid!='2.5.6.0': | |||
# STRUCTURAL object classes are sub-classes of 'top' by default | |||
self.sup = ('top',) | |||
else: | |||
self.sup = d['SUP'] | |||
return | |||
def __str__(self): | |||
result = [str(self.oid)] | |||
result.append(self.key_list('NAME',self.names,quoted=1)) | |||
result.append(self.key_attr('DESC',self.desc,quoted=1)) | |||
result.append(self.key_list('SUP',self.sup,sep=' $ ')) | |||
result.append({0:'',1:' OBSOLETE'}[self.obsolete]) | |||
result.append({0:' STRUCTURAL',1:' ABSTRACT',2:' AUXILIARY'}[self.kind]) | |||
result.append(self.key_list('MUST',self.must,sep=' $ ')) | |||
result.append(self.key_list('MAY',self.may,sep=' $ ')) | |||
return '( %s )' % ''.join(result) | |||
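# Example (illustrative, not part of the vendored module): parsing an | |||
# ObjectClassDescription string taken from a subschema subentry. | |||
# | |||
#   oc = ObjectClass("( 2.5.6.6 NAME 'person' SUP top STRUCTURAL " | |||
#                    "MUST ( sn $ cn ) MAY ( userPassword $ telephoneNumber ) )") | |||
#   oc.oid    -> '2.5.6.6' | |||
#   oc.names  -> ('person',) | |||
#   oc.kind   -> 0 (STRUCTURAL) | |||
#   oc.must   -> ('sn', 'cn') | |||
#   str(oc)   -> the re-serialized ObjectClassDescription | |||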
AttributeUsage = ldap.cidict.cidict({ | |||
'userApplication':0, # work-around for non-compliant schema | |||
'userApplications':0, | |||
'directoryOperation':1, | |||
'distributedOperation':2, | |||
'dSAOperation':3, | |||
}) | |||
class AttributeType(SchemaElement): | |||
""" | |||
Arguments: | |||
schema_element_str | |||
String containing an AttributeTypeDescription | |||
Class attributes: | |||
oid | |||
OID assigned to the attribute type | |||
names | |||
This list of strings contains all NAMEs of the attribute type | |||
desc | |||
This string contains description text (DESC) of the attribute type | |||
obsolete | |||
Integer flag (0 or 1) indicating whether the attribute type is marked | |||
as OBSOLETE in the schema | |||
single_value | |||
Integer flag (0 or 1) indicating whether the attribute must | |||
have only one value | |||
syntax | |||
String contains OID of the LDAP syntax assigned to the attribute type | |||
no_user_mod | |||
Integer flag (0 or 1) indicating whether the attribute is modifiable | |||
by a client application | |||
equality | |||
String contains NAME or OID of the matching rule used for | |||
checking whether attribute values are equal | |||
substr | |||
String contains NAME or OID of the matching rule used for | |||
checking whether an attribute value contains another value | |||
ordering | |||
String contains NAME or OID of the matching rule used for | |||
    checking whether one attribute value is less than or equal to another | |||
usage | |||
USAGE of an attribute type: | |||
0 = userApplications | |||
1 = directoryOperation, | |||
2 = distributedOperation, | |||
3 = dSAOperation | |||
sup | |||
This list of strings contains NAMEs or OIDs of attribute types | |||
this attribute type is derived from | |||
""" | |||
schema_attribute = u'attributeTypes' | |||
token_defaults = { | |||
'NAME':(()), | |||
'DESC':(None,), | |||
'OBSOLETE':None, | |||
'SUP':(()), | |||
'EQUALITY':(None,), | |||
'ORDERING':(None,), | |||
'SUBSTR':(None,), | |||
'SYNTAX':(None,), | |||
'SINGLE-VALUE':None, | |||
'COLLECTIVE':None, | |||
'NO-USER-MODIFICATION':None, | |||
'USAGE':('userApplications',), | |||
'X-ORIGIN':(None,), | |||
'X-ORDERED':(None,), | |||
} | |||
def _set_attrs(self,l,d): | |||
self.names = d['NAME'] | |||
self.desc = d['DESC'][0] | |||
self.obsolete = d['OBSOLETE']!=None | |||
self.sup = d['SUP'] | |||
self.equality = d['EQUALITY'][0] | |||
self.ordering = d['ORDERING'][0] | |||
self.substr = d['SUBSTR'][0] | |||
self.x_origin = d['X-ORIGIN'][0] | |||
self.x_ordered = d['X-ORDERED'][0] | |||
try: | |||
syntax = d['SYNTAX'][0] | |||
except IndexError: | |||
self.syntax = None | |||
self.syntax_len = None | |||
else: | |||
if syntax is None: | |||
self.syntax = None | |||
self.syntax_len = None | |||
else: | |||
try: | |||
self.syntax,syntax_len = d['SYNTAX'][0].split("{") | |||
except ValueError: | |||
self.syntax = d['SYNTAX'][0] | |||
self.syntax_len = None | |||
for i in l: | |||
if i.startswith("{") and i.endswith("}"): | |||
self.syntax_len = int(i[1:-1]) | |||
else: | |||
self.syntax_len = int(syntax_len[:-1]) | |||
self.single_value = d['SINGLE-VALUE']!=None | |||
self.collective = d['COLLECTIVE']!=None | |||
self.no_user_mod = d['NO-USER-MODIFICATION']!=None | |||
self.usage = AttributeUsage.get(d['USAGE'][0],0) | |||
return | |||
def __str__(self): | |||
result = [str(self.oid)] | |||
result.append(self.key_list('NAME',self.names,quoted=1)) | |||
result.append(self.key_attr('DESC',self.desc,quoted=1)) | |||
result.append(self.key_list('SUP',self.sup,sep=' $ ')) | |||
result.append({0:'',1:' OBSOLETE'}[self.obsolete]) | |||
result.append(self.key_attr('EQUALITY',self.equality)) | |||
result.append(self.key_attr('ORDERING',self.ordering)) | |||
result.append(self.key_attr('SUBSTR',self.substr)) | |||
result.append(self.key_attr('SYNTAX',self.syntax)) | |||
if self.syntax_len!=None: | |||
result.append(('{%d}' % (self.syntax_len))*(self.syntax_len>0)) | |||
result.append({0:'',1:' SINGLE-VALUE'}[self.single_value]) | |||
result.append({0:'',1:' COLLECTIVE'}[self.collective]) | |||
result.append({0:'',1:' NO-USER-MODIFICATION'}[self.no_user_mod]) | |||
result.append( | |||
{ | |||
0:"", | |||
1:" USAGE directoryOperation", | |||
2:" USAGE distributedOperation", | |||
3:" USAGE dSAOperation", | |||
}[self.usage] | |||
) | |||
result.append(self.key_attr('X-ORIGIN',self.x_origin,quoted=1)) | |||
result.append(self.key_attr('X-ORDERED',self.x_ordered,quoted=1)) | |||
return '( %s )' % ''.join(result) | |||
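# Example (illustrative, not part of the vendored module): parsing an | |||
# AttributeTypeDescription with a length-limited syntax. | |||
# | |||
#   at = AttributeType("( 2.5.4.3 NAME ( 'cn' 'commonName' ) SUP name " | |||
#                      "EQUALITY caseIgnoreMatch " | |||
#                      "SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} )") | |||
#   at.names      -> ('cn', 'commonName') | |||
#   at.equality   -> 'caseIgnoreMatch' | |||
#   at.syntax     -> '1.3.6.1.4.1.1466.115.121.1.15' | |||
#   at.syntax_len -> 32768 | |||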
class LDAPSyntax(SchemaElement): | |||
""" | |||
SyntaxDescription | |||
oid | |||
OID assigned to the LDAP syntax | |||
desc | |||
This string contains description text (DESC) of the LDAP syntax | |||
not_human_readable | |||
Integer flag (0 or 1) indicating whether the attribute type is marked | |||
as not human-readable (X-NOT-HUMAN-READABLE) | |||
""" | |||
schema_attribute = u'ldapSyntaxes' | |||
token_defaults = { | |||
'DESC':(None,), | |||
'X-NOT-HUMAN-READABLE':(None,), | |||
'X-BINARY-TRANSFER-REQUIRED':(None,), | |||
'X-SUBST':(None,), | |||
} | |||
def _set_attrs(self,l,d): | |||
self.desc = d['DESC'][0] | |||
self.x_subst = d['X-SUBST'][0] | |||
self.not_human_readable = \ | |||
self.oid in NOT_HUMAN_READABLE_LDAP_SYNTAXES or \ | |||
d['X-NOT-HUMAN-READABLE'][0]=='TRUE' | |||
self.x_binary_transfer_required = d['X-BINARY-TRANSFER-REQUIRED'][0]=='TRUE' | |||
return | |||
def __str__(self): | |||
result = [str(self.oid)] | |||
result.append(self.key_attr('DESC',self.desc,quoted=1)) | |||
result.append(self.key_attr('X-SUBST',self.x_subst,quoted=1)) | |||
result.append( | |||
{0:'',1:" X-NOT-HUMAN-READABLE 'TRUE'"}[self.not_human_readable] | |||
) | |||
return '( %s )' % ''.join(result) | |||
class MatchingRule(SchemaElement): | |||
""" | |||
Arguments: | |||
schema_element_str | |||
    String containing a MatchingRuleDescription | |||
Class attributes: | |||
oid | |||
OID assigned to the matching rule | |||
names | |||
This list of strings contains all NAMEs of the matching rule | |||
desc | |||
This string contains description text (DESC) of the matching rule | |||
obsolete | |||
Integer flag (0 or 1) indicating whether the matching rule is marked | |||
as OBSOLETE in the schema | |||
syntax | |||
String contains OID of the LDAP syntax this matching rule is usable with | |||
""" | |||
schema_attribute = u'matchingRules' | |||
token_defaults = { | |||
'NAME':(()), | |||
'DESC':(None,), | |||
'OBSOLETE':None, | |||
'SYNTAX':(None,), | |||
} | |||
def _set_attrs(self,l,d): | |||
self.names = d['NAME'] | |||
self.desc = d['DESC'][0] | |||
self.obsolete = d['OBSOLETE']!=None | |||
self.syntax = d['SYNTAX'][0] | |||
return | |||
def __str__(self): | |||
result = [str(self.oid)] | |||
result.append(self.key_list('NAME',self.names,quoted=1)) | |||
result.append(self.key_attr('DESC',self.desc,quoted=1)) | |||
result.append({0:'',1:' OBSOLETE'}[self.obsolete]) | |||
result.append(self.key_attr('SYNTAX',self.syntax)) | |||
return '( %s )' % ''.join(result) | |||
class MatchingRuleUse(SchemaElement): | |||
""" | |||
Arguments: | |||
schema_element_str | |||
    String containing a MatchingRuleUseDescription | |||
Class attributes: | |||
oid | |||
OID of the accompanying matching rule | |||
names | |||
This list of strings contains all NAMEs of the matching rule | |||
desc | |||
This string contains description text (DESC) of the matching rule | |||
obsolete | |||
Integer flag (0 or 1) indicating whether the matching rule is marked | |||
as OBSOLETE in the schema | |||
applies | |||
This list of strings contains NAMEs or OIDs of attribute types | |||
for which this matching rule is used | |||
""" | |||
schema_attribute = u'matchingRuleUse' | |||
token_defaults = { | |||
'NAME':(()), | |||
'DESC':(None,), | |||
'OBSOLETE':None, | |||
'APPLIES':(()), | |||
} | |||
def _set_attrs(self,l,d): | |||
self.names = d['NAME'] | |||
self.desc = d['DESC'][0] | |||
self.obsolete = d['OBSOLETE']!=None | |||
self.applies = d['APPLIES'] | |||
return | |||
def __str__(self): | |||
result = [str(self.oid)] | |||
result.append(self.key_list('NAME',self.names,quoted=1)) | |||
result.append(self.key_attr('DESC',self.desc,quoted=1)) | |||
result.append({0:'',1:' OBSOLETE'}[self.obsolete]) | |||
result.append(self.key_list('APPLIES',self.applies,sep=' $ ')) | |||
return '( %s )' % ''.join(result) | |||
class DITContentRule(SchemaElement): | |||
""" | |||
Arguments: | |||
schema_element_str | |||
    String containing a DITContentRuleDescription | |||
Class attributes: | |||
oid | |||
OID of the accompanying structural object class | |||
names | |||
This list of strings contains all NAMEs of the DIT content rule | |||
desc | |||
This string contains description text (DESC) of the DIT content rule | |||
obsolete | |||
Integer flag (0 or 1) indicating whether the DIT content rule is marked | |||
as OBSOLETE in the schema | |||
aux | |||
This list of strings contains NAMEs or OIDs of all auxiliary | |||
object classes usable in an entry of the object class | |||
must | |||
This list of strings contains NAMEs or OIDs of all attributes | |||
an entry of the object class must have which may extend the | |||
list of required attributes of the object classes of an entry | |||
may | |||
This list of strings contains NAMEs or OIDs of additional attributes | |||
an entry of the object class may have which may extend the | |||
list of optional attributes of the object classes of an entry | |||
nots | |||
This list of strings contains NAMEs or OIDs of attributes which | |||
may not be present in an entry of the object class | |||
""" | |||
schema_attribute = u'dITContentRules' | |||
token_defaults = { | |||
'NAME':(()), | |||
'DESC':(None,), | |||
'OBSOLETE':None, | |||
'AUX':(()), | |||
'MUST':(()), | |||
'MAY':(()), | |||
'NOT':(()), | |||
} | |||
def _set_attrs(self,l,d): | |||
self.names = d['NAME'] | |||
self.desc = d['DESC'][0] | |||
self.obsolete = d['OBSOLETE']!=None | |||
self.aux = d['AUX'] | |||
self.must = d['MUST'] | |||
self.may = d['MAY'] | |||
self.nots = d['NOT'] | |||
return | |||
def __str__(self): | |||
result = [str(self.oid)] | |||
result.append(self.key_list('NAME',self.names,quoted=1)) | |||
result.append(self.key_attr('DESC',self.desc,quoted=1)) | |||
result.append({0:'',1:' OBSOLETE'}[self.obsolete]) | |||
result.append(self.key_list('AUX',self.aux,sep=' $ ')) | |||
result.append(self.key_list('MUST',self.must,sep=' $ ')) | |||
result.append(self.key_list('MAY',self.may,sep=' $ ')) | |||
result.append(self.key_list('NOT',self.nots,sep=' $ ')) | |||
return '( %s )' % ''.join(result) | |||
class DITStructureRule(SchemaElement): | |||
""" | |||
Arguments: | |||
schema_element_str | |||
    String containing a DITStructureRuleDescription | |||
Class attributes: | |||
ruleid | |||
rule ID of the DIT structure rule (only locally unique) | |||
names | |||
This list of strings contains all NAMEs of the DIT structure rule | |||
desc | |||
This string contains description text (DESC) of the DIT structure rule | |||
obsolete | |||
Integer flag (0 or 1) indicating whether the DIT content rule is marked | |||
as OBSOLETE in the schema | |||
form | |||
List of strings with NAMEs or OIDs of associated name forms | |||
sup | |||
List of strings with NAMEs or OIDs of allowed structural object classes | |||
of superior entries in the DIT | |||
""" | |||
schema_attribute = u'dITStructureRules' | |||
token_defaults = { | |||
'NAME':(()), | |||
'DESC':(None,), | |||
'OBSOLETE':None, | |||
'FORM':(None,), | |||
'SUP':(()), | |||
} | |||
def set_id(self,element_id): | |||
self.ruleid = element_id | |||
def get_id(self): | |||
return self.ruleid | |||
def _set_attrs(self,l,d): | |||
self.names = d['NAME'] | |||
self.desc = d['DESC'][0] | |||
self.obsolete = d['OBSOLETE']!=None | |||
self.form = d['FORM'][0] | |||
self.sup = d['SUP'] | |||
return | |||
def __str__(self): | |||
result = [str(self.ruleid)] | |||
result.append(self.key_list('NAME',self.names,quoted=1)) | |||
result.append(self.key_attr('DESC',self.desc,quoted=1)) | |||
result.append({0:'',1:' OBSOLETE'}[self.obsolete]) | |||
result.append(self.key_attr('FORM',self.form,quoted=0)) | |||
result.append(self.key_list('SUP',self.sup,sep=' $ ')) | |||
return '( %s )' % ''.join(result) | |||
class NameForm(SchemaElement): | |||
""" | |||
Arguments: | |||
schema_element_str | |||
    String containing a NameFormDescription | |||
Class attributes: | |||
oid | |||
OID of the name form | |||
names | |||
This list of strings contains all NAMEs of the name form | |||
desc | |||
This string contains description text (DESC) of the name form | |||
obsolete | |||
Integer flag (0 or 1) indicating whether the name form is marked | |||
as OBSOLETE in the schema | |||
form | |||
List of strings with NAMEs or OIDs of associated name forms | |||
oc | |||
String with NAME or OID of structural object classes this name form | |||
is usable with | |||
must | |||
This list of strings contains NAMEs or OIDs of all attributes | |||
an RDN must contain | |||
may | |||
This list of strings contains NAMEs or OIDs of additional attributes | |||
an RDN may contain | |||
""" | |||
schema_attribute = u'nameForms' | |||
token_defaults = { | |||
'NAME':(()), | |||
'DESC':(None,), | |||
'OBSOLETE':None, | |||
'OC':(None,), | |||
'MUST':(()), | |||
'MAY':(()), | |||
} | |||
def _set_attrs(self,l,d): | |||
self.names = d['NAME'] | |||
self.desc = d['DESC'][0] | |||
self.obsolete = d['OBSOLETE']!=None | |||
self.oc = d['OC'][0] | |||
self.must = d['MUST'] | |||
self.may = d['MAY'] | |||
return | |||
def __str__(self): | |||
result = [str(self.oid)] | |||
result.append(self.key_list('NAME',self.names,quoted=1)) | |||
result.append(self.key_attr('DESC',self.desc,quoted=1)) | |||
result.append({0:'',1:' OBSOLETE'}[self.obsolete]) | |||
result.append(self.key_attr('OC',self.oc)) | |||
result.append(self.key_list('MUST',self.must,sep=' $ ')) | |||
result.append(self.key_list('MAY',self.may,sep=' $ ')) | |||
return '( %s )' % ''.join(result) | |||
class Entry(IterableUserDict): | |||
""" | |||
Schema-aware implementation of an LDAP entry class. | |||
Mainly it holds the attributes in a string-keyed dictionary with | |||
the OID as key. | |||
""" | |||
def __init__(self,schema,dn,entry): | |||
self._keytuple2attrtype = {} | |||
self._attrtype2keytuple = {} | |||
self._s = schema | |||
self.dn = dn | |||
    IterableUserDict.__init__(self,{}) | |||
self.update(entry) | |||
def _at2key(self,nameoroid): | |||
""" | |||
Return tuple of OID and all sub-types of attribute type specified | |||
in nameoroid. | |||
""" | |||
try: | |||
# Mapping already in cache | |||
return self._attrtype2keytuple[nameoroid] | |||
except KeyError: | |||
# Mapping has to be constructed | |||
oid = self._s.getoid(ldap.schema.AttributeType,nameoroid) | |||
l = nameoroid.lower().split(';') | |||
l[0] = oid | |||
t = tuple(l) | |||
self._attrtype2keytuple[nameoroid] = t | |||
return t | |||
def update(self,dict): | |||
    for key, value in dict.items(): | |||
self[key] = value | |||
def __contains__(self,nameoroid): | |||
return self._at2key(nameoroid) in self.data | |||
def __getitem__(self,nameoroid): | |||
return self.data[self._at2key(nameoroid)] | |||
def __setitem__(self,nameoroid,attr_values): | |||
k = self._at2key(nameoroid) | |||
self._keytuple2attrtype[k] = nameoroid | |||
self.data[k] = attr_values | |||
def __delitem__(self,nameoroid): | |||
k = self._at2key(nameoroid) | |||
del self.data[k] | |||
del self._attrtype2keytuple[nameoroid] | |||
del self._keytuple2attrtype[k] | |||
def has_key(self,nameoroid): | |||
k = self._at2key(nameoroid) | |||
return k in self.data | |||
def keys(self): | |||
return self._keytuple2attrtype.values() | |||
def items(self): | |||
return [ | |||
(k,self[k]) | |||
for k in self.keys() | |||
] | |||
def attribute_types( | |||
self,attr_type_filter=None,raise_keyerror=1 | |||
): | |||
""" | |||
Convenience wrapper around SubSchema.attribute_types() which | |||
passes object classes of this particular entry as argument to | |||
SubSchema.attribute_types() | |||
""" | |||
return self._s.attribute_types( | |||
self.get('objectClass',[]),attr_type_filter,raise_keyerror | |||
) |
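A brief sketch of how the schema-aware Entry class is used; the server URL, DN and attribute values below are placeholders (the SubSchema instance is fetched with ldap.schema.urlfetch(), see the subentry module in the next hunk): | |||
import ldap.schema | |||
import ldap.schema.models | |||
# Obtain a SubSchema instance from a server (placeholder URL). | |||
_, schema = ldap.schema.urlfetch('ldap://ldap.example.com') | |||
raw_entry = { | |||
    'objectClass': ['person'], | |||
    'CN': ['John Doe'],        # attribute name given in non-canonical case | |||
    'sn': ['Doe'], | |||
} | |||
entry = ldap.schema.models.Entry( | |||
    schema, 'cn=John Doe,ou=people,dc=example,dc=com', raw_entry | |||
) | |||
# Lookups are keyed by OID internally, so any NAME variant resolves | |||
# to the same values: | |||
entry['cn']           # -> ['John Doe'] | |||
entry['commonName']   # -> ['John Doe'] as well | |||
# MUST/MAY attribute types applicable to this entry's object classes: | |||
must, may = entry.attribute_types() | |||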
@@ -0,0 +1,498 @@ | |||
""" | |||
ldap.schema.subentry - subschema subentry handling | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
import copy | |||
import ldap.cidict,ldap.schema | |||
from ldap.compat import urlopen | |||
from ldap.schema.models import * | |||
import ldapurl | |||
import ldif | |||
SCHEMA_CLASS_MAPPING = ldap.cidict.cidict() | |||
SCHEMA_ATTR_MAPPING = {} | |||
for o in list(vars().values()): | |||
if hasattr(o,'schema_attribute'): | |||
SCHEMA_CLASS_MAPPING[o.schema_attribute] = o | |||
SCHEMA_ATTR_MAPPING[o] = o.schema_attribute | |||
SCHEMA_ATTRS = SCHEMA_CLASS_MAPPING.keys() | |||
class SubschemaError(ValueError): | |||
pass | |||
class OIDNotUnique(SubschemaError): | |||
def __init__(self,desc): | |||
self.desc = desc | |||
def __str__(self): | |||
return 'OID not unique for %s' % (self.desc) | |||
class NameNotUnique(SubschemaError): | |||
def __init__(self,desc): | |||
self.desc = desc | |||
def __str__(self): | |||
return 'NAME not unique for %s' % (self.desc) | |||
class SubSchema: | |||
""" | |||
Arguments: | |||
sub_schema_sub_entry | |||
Dictionary usually returned by LDAP search or the LDIF parser | |||
containing the sub schema sub entry | |||
check_uniqueness | |||
Defines whether uniqueness of OIDs and NAME is checked. | |||
0 | |||
no check | |||
1 | |||
check but add schema description with work-around | |||
2 | |||
check and raise exception if non-unique OID or NAME is found | |||
Class attributes: | |||
sed | |||
Dictionary holding the subschema information as pre-parsed | |||
SchemaElement objects (do not access directly!) | |||
name2oid | |||
Dictionary holding the mapping from NAMEs to OIDs | |||
(do not access directly!) | |||
non_unique_oids | |||
List of OIDs used at least twice in the subschema | |||
non_unique_names | |||
List of NAMEs used at least twice in the subschema for the same schema element | |||
""" | |||
def __init__(self,sub_schema_sub_entry,check_uniqueness=1): | |||
# Initialize all dictionaries | |||
self.name2oid = {} | |||
self.sed = {} | |||
self.non_unique_oids = {} | |||
self.non_unique_names = {} | |||
for c in SCHEMA_CLASS_MAPPING.values(): | |||
self.name2oid[c] = ldap.cidict.cidict() | |||
self.sed[c] = {} | |||
self.non_unique_names[c] = ldap.cidict.cidict() | |||
# Transform entry dict to case-insensitive dict | |||
e = ldap.cidict.cidict(sub_schema_sub_entry) | |||
# Build the schema registry in dictionaries | |||
for attr_type in SCHEMA_ATTRS: | |||
for attr_value in filter(None,e.get(attr_type,[])): | |||
se_class = SCHEMA_CLASS_MAPPING[attr_type] | |||
se_instance = se_class(attr_value) | |||
se_id = se_instance.get_id() | |||
if check_uniqueness and se_id in self.sed[se_class]: | |||
self.non_unique_oids[se_id] = None | |||
if check_uniqueness==1: | |||
# Add to subschema by adding suffix to ID | |||
suffix_counter = 1 | |||
new_se_id = se_id | |||
while new_se_id in self.sed[se_class]: | |||
new_se_id = ';'.join((se_id,str(suffix_counter))) | |||
suffix_counter += 1 | |||
else: | |||
se_id = new_se_id | |||
elif check_uniqueness>=2: | |||
raise OIDNotUnique(attr_value) | |||
# Store the schema element instance in the central registry | |||
self.sed[se_class][se_id] = se_instance | |||
if hasattr(se_instance,'names'): | |||
for name in ldap.cidict.cidict({}.fromkeys(se_instance.names)).keys(): | |||
if check_uniqueness and name in self.name2oid[se_class]: | |||
              self.non_unique_names[se_class][se_id] = None | |||
              if check_uniqueness>=2: | |||
                raise NameNotUnique(attr_value) | |||
else: | |||
self.name2oid[se_class][name] = se_id | |||
# Turn dict into list maybe more handy for applications | |||
self.non_unique_oids = self.non_unique_oids.keys() | |||
return # subSchema.__init__() | |||
def ldap_entry(self): | |||
""" | |||
Returns a dictionary containing the sub schema sub entry | |||
""" | |||
# Initialize the dictionary with empty lists | |||
entry = {} | |||
# Collect the schema elements and store them in | |||
# entry's attributes | |||
for se_class, elements in self.sed.items(): | |||
for se in elements.values(): | |||
se_str = str(se) | |||
try: | |||
entry[SCHEMA_ATTR_MAPPING[se_class]].append(se_str) | |||
except KeyError: | |||
entry[SCHEMA_ATTR_MAPPING[se_class]] = [ se_str ] | |||
return entry | |||
def listall(self,schema_element_class,schema_element_filters=None): | |||
""" | |||
Returns a list of OIDs of all available schema | |||
elements of a given schema element class. | |||
""" | |||
avail_se = self.sed[schema_element_class] | |||
if schema_element_filters: | |||
result = [] | |||
for se_key, se in avail_se.items(): | |||
for fk,fv in schema_element_filters: | |||
try: | |||
if getattr(se,fk) in fv: | |||
result.append(se_key) | |||
except AttributeError: | |||
pass | |||
else: | |||
result = avail_se.keys() | |||
return result | |||
def tree(self,schema_element_class,schema_element_filters=None): | |||
""" | |||
Returns a ldap.cidict.cidict dictionary representing the | |||
tree structure of the schema elements. | |||
""" | |||
assert schema_element_class in [ObjectClass,AttributeType] | |||
avail_se = self.listall(schema_element_class,schema_element_filters) | |||
top_node = '_' | |||
tree = ldap.cidict.cidict({top_node:[]}) | |||
# 1. Pass: Register all nodes | |||
for se in avail_se: | |||
tree[se] = [] | |||
# 2. Pass: Register all sup references | |||
for se_oid in avail_se: | |||
se_obj = self.get_obj(schema_element_class,se_oid,None) | |||
if se_obj.__class__!=schema_element_class: | |||
# Ignore schema elements not matching schema_element_class. | |||
# This helps with falsely assigned OIDs. | |||
continue | |||
assert se_obj.__class__==schema_element_class, \ | |||
"Schema element referenced by %s must be of class %s but was %s" % ( | |||
se_oid,schema_element_class.__name__,se_obj.__class__ | |||
) | |||
for s in se_obj.sup or ('_',): | |||
sup_oid = self.getoid(schema_element_class,s) | |||
try: | |||
tree[sup_oid].append(se_oid) | |||
        except KeyError: | |||
pass | |||
return tree | |||
def getoid(self,se_class,nameoroid,raise_keyerror=0): | |||
""" | |||
Get an OID by name or OID | |||
""" | |||
nameoroid_stripped = nameoroid.split(';')[0].strip() | |||
if nameoroid_stripped in self.sed[se_class]: | |||
# name_or_oid is already a registered OID | |||
return nameoroid_stripped | |||
else: | |||
try: | |||
result_oid = self.name2oid[se_class][nameoroid_stripped] | |||
except KeyError: | |||
if raise_keyerror: | |||
raise KeyError('No registered %s-OID for nameoroid %s' % (se_class.__name__,repr(nameoroid_stripped))) | |||
else: | |||
result_oid = nameoroid_stripped | |||
return result_oid | |||
def get_inheritedattr(self,se_class,nameoroid,name): | |||
""" | |||
Get a possibly inherited attribute specified by name | |||
of a schema element specified by nameoroid. | |||
Returns None if class attribute is not set at all. | |||
Raises KeyError if no schema element is found by nameoroid. | |||
""" | |||
se = self.sed[se_class][self.getoid(se_class,nameoroid)] | |||
try: | |||
result = getattr(se,name) | |||
except AttributeError: | |||
result = None | |||
if result is None and se.sup: | |||
result = self.get_inheritedattr(se_class,se.sup[0],name) | |||
return result | |||
def get_obj(self,se_class,nameoroid,default=None,raise_keyerror=0): | |||
""" | |||
Get a schema element by name or OID | |||
""" | |||
se_oid = self.getoid(se_class,nameoroid) | |||
try: | |||
se_obj = self.sed[se_class][se_oid] | |||
except KeyError: | |||
if raise_keyerror: | |||
raise KeyError('No ldap.schema.%s instance with nameoroid %s and se_oid %s' % ( | |||
se_class.__name__,repr(nameoroid),repr(se_oid)) | |||
) | |||
else: | |||
se_obj = default | |||
return se_obj | |||
def get_inheritedobj(self,se_class,nameoroid,inherited=None): | |||
""" | |||
Get a schema element by name or OID with all class attributes | |||
set including inherited class attributes | |||
""" | |||
inherited = inherited or [] | |||
se = copy.copy(self.sed[se_class].get(self.getoid(se_class,nameoroid))) | |||
if se and hasattr(se,'sup'): | |||
for class_attr_name in inherited: | |||
setattr(se,class_attr_name,self.get_inheritedattr(se_class,nameoroid,class_attr_name)) | |||
return se | |||
def get_syntax(self,nameoroid): | |||
""" | |||
Get the syntax of an attribute type specified by name or OID | |||
""" | |||
at_oid = self.getoid(AttributeType,nameoroid) | |||
try: | |||
at_obj = self.get_inheritedobj(AttributeType,at_oid) | |||
except KeyError: | |||
return None | |||
else: | |||
return at_obj.syntax | |||
def get_structural_oc(self,oc_list): | |||
""" | |||
Returns OID of structural object class in oc_list | |||
    if one is present. Returns None otherwise. | |||
""" | |||
# Get tree of all STRUCTURAL object classes | |||
oc_tree = self.tree(ObjectClass,[('kind',[0])]) | |||
# Filter all STRUCTURAL object classes | |||
struct_ocs = {} | |||
for oc_nameoroid in oc_list: | |||
oc_se = self.get_obj(ObjectClass,oc_nameoroid,None) | |||
if oc_se and oc_se.kind==0: | |||
struct_ocs[oc_se.oid] = None | |||
result = None | |||
# Build a copy of the oid list, to be cleaned as we go. | |||
struct_oc_list = list(struct_ocs) | |||
while struct_oc_list: | |||
oid = struct_oc_list.pop() | |||
for child_oid in oc_tree[oid]: | |||
if self.getoid(ObjectClass,child_oid) in struct_ocs: | |||
break | |||
else: | |||
result = oid | |||
return result | |||
def get_applicable_aux_classes(self,nameoroid): | |||
""" | |||
Return a list of the applicable AUXILIARY object classes | |||
for a STRUCTURAL object class specified by 'nameoroid' | |||
if the object class is governed by a DIT content rule. | |||
If there's no DIT content rule all available AUXILIARY | |||
object classes are returned. | |||
""" | |||
content_rule = self.get_obj(DITContentRule,nameoroid) | |||
if content_rule: | |||
# Return AUXILIARY object classes from DITContentRule instance | |||
return content_rule.aux | |||
else: | |||
# list all AUXILIARY object classes | |||
return self.listall(ObjectClass,[('kind',[2])]) | |||
def attribute_types( | |||
self,object_class_list,attr_type_filter=None,raise_keyerror=1,ignore_dit_content_rule=0 | |||
): | |||
""" | |||
Returns a 2-tuple of all must and may attributes including | |||
all inherited attributes of superior object classes | |||
by walking up classes along the SUP attribute. | |||
The attributes are stored in a ldap.cidict.cidict dictionary. | |||
object_class_list | |||
list of strings specifying object class names or OIDs | |||
attr_type_filter | |||
        list of 2-tuples containing lists of class attributes | |||
        which have to be matched | |||
    raise_keyerror | |||
        If false, KeyError exceptions for non-existent schema elements | |||
        are ignored; otherwise they are raised | |||
    ignore_dit_content_rule | |||
        If true, a DIT content rule governing the structural object class | |||
        is ignored | |||
""" | |||
AttributeType = ldap.schema.AttributeType | |||
ObjectClass = ldap.schema.ObjectClass | |||
# Map object_class_list to object_class_oids (list of OIDs) | |||
object_class_oids = [ | |||
self.getoid(ObjectClass,o) | |||
for o in object_class_list | |||
] | |||
# Initialize | |||
oid_cache = {} | |||
r_must,r_may = ldap.cidict.cidict(),ldap.cidict.cidict() | |||
if '1.3.6.1.4.1.1466.101.120.111' in object_class_oids: | |||
# Object class 'extensibleObject' MAY carry every attribute type | |||
for at_obj in self.sed[AttributeType].values(): | |||
r_may[at_obj.oid] = at_obj | |||
# Loop over OIDs of all given object classes | |||
while object_class_oids: | |||
object_class_oid = object_class_oids.pop(0) | |||
# Check whether the objectClass with this OID | |||
# has already been processed | |||
if object_class_oid in oid_cache: | |||
continue | |||
# Cache this OID as already being processed | |||
oid_cache[object_class_oid] = None | |||
try: | |||
object_class = self.sed[ObjectClass][object_class_oid] | |||
except KeyError: | |||
if raise_keyerror: | |||
raise | |||
# Ignore this object class | |||
continue | |||
assert isinstance(object_class,ObjectClass) | |||
assert hasattr(object_class,'must'),ValueError(object_class_oid) | |||
assert hasattr(object_class,'may'),ValueError(object_class_oid) | |||
for a in object_class.must: | |||
se_oid = self.getoid(AttributeType,a,raise_keyerror=raise_keyerror) | |||
r_must[se_oid] = self.get_obj(AttributeType,se_oid,raise_keyerror=raise_keyerror) | |||
for a in object_class.may: | |||
se_oid = self.getoid(AttributeType,a,raise_keyerror=raise_keyerror) | |||
r_may[se_oid] = self.get_obj(AttributeType,se_oid,raise_keyerror=raise_keyerror) | |||
object_class_oids.extend([ | |||
self.getoid(ObjectClass,o) | |||
for o in object_class.sup | |||
]) | |||
# Process DIT content rules | |||
if not ignore_dit_content_rule: | |||
structural_oc = self.get_structural_oc(object_class_list) | |||
if structural_oc: | |||
# Process applicable DIT content rule | |||
try: | |||
dit_content_rule = self.get_obj(DITContentRule,structural_oc,raise_keyerror=1) | |||
except KeyError: | |||
          # No DIT content rule found for structural objectclass | |||
pass | |||
else: | |||
for a in dit_content_rule.must: | |||
se_oid = self.getoid(AttributeType,a,raise_keyerror=raise_keyerror) | |||
r_must[se_oid] = self.get_obj(AttributeType,se_oid,raise_keyerror=raise_keyerror) | |||
for a in dit_content_rule.may: | |||
se_oid = self.getoid(AttributeType,a,raise_keyerror=raise_keyerror) | |||
r_may[se_oid] = self.get_obj(AttributeType,se_oid,raise_keyerror=raise_keyerror) | |||
for a in dit_content_rule.nots: | |||
a_oid = self.getoid(AttributeType,a,raise_keyerror=raise_keyerror) | |||
try: | |||
del r_may[a_oid] | |||
except KeyError: | |||
pass | |||
    # Remove all mandatory attribute types from | |||
# optional attribute type list | |||
for a in list(r_may.keys()): | |||
if a in r_must: | |||
del r_may[a] | |||
# Apply attr_type_filter to results | |||
if attr_type_filter: | |||
for l in [r_must,r_may]: | |||
for a in list(l.keys()): | |||
for afk,afv in attr_type_filter: | |||
try: | |||
schema_attr_type = self.sed[AttributeType][a] | |||
except KeyError: | |||
if raise_keyerror: | |||
raise KeyError('No attribute type found in sub schema by name %s' % (a)) | |||
# If there's no schema element for this attribute type | |||
# but still KeyError is to be ignored we filter it away | |||
del l[a] | |||
break | |||
else: | |||
if not getattr(schema_attr_type,afk) in afv: | |||
del l[a] | |||
break | |||
return r_must,r_may # attribute_types() | |||
def urlfetch(uri,trace_level=0): | |||
""" | |||
Fetches a parsed schema entry by uri. | |||
  If uri is an LDAP URL the LDAP server is queried directly. | |||
  Otherwise uri is assumed to point to an LDIF file which | |||
is loaded with urllib. | |||
""" | |||
uri = uri.strip() | |||
if uri.startswith(('ldap:', 'ldaps:', 'ldapi:')): | |||
ldap_url = ldapurl.LDAPUrl(uri) | |||
l=ldap.initialize(ldap_url.initializeUrl(),trace_level) | |||
l.protocol_version = ldap.VERSION3 | |||
l.simple_bind_s(ldap_url.who or u'', ldap_url.cred or u'') | |||
subschemasubentry_dn = l.search_subschemasubentry_s(ldap_url.dn) | |||
if subschemasubentry_dn is None: | |||
s_temp = None | |||
else: | |||
if ldap_url.attrs is None: | |||
schema_attrs = SCHEMA_ATTRS | |||
else: | |||
schema_attrs = ldap_url.attrs | |||
s_temp = l.read_subschemasubentry_s( | |||
subschemasubentry_dn,attrs=schema_attrs | |||
) | |||
l.unbind_s() | |||
del l | |||
else: | |||
ldif_file = urlopen(uri) | |||
ldif_parser = ldif.LDIFRecordList(ldif_file,max_entries=1) | |||
ldif_parser.parse() | |||
subschemasubentry_dn,s_temp = ldif_parser.all_records[0] | |||
# Work-around for mixed-cased attribute names | |||
subschemasubentry_entry = ldap.cidict.cidict() | |||
s_temp = s_temp or {} | |||
for at,av in s_temp.items(): | |||
if at in SCHEMA_CLASS_MAPPING: | |||
try: | |||
subschemasubentry_entry[at].extend(av) | |||
except KeyError: | |||
subschemasubentry_entry[at] = av | |||
# Finally parse the schema | |||
if subschemasubentry_dn!=None: | |||
parsed_sub_schema = ldap.schema.SubSchema(subschemasubentry_entry) | |||
else: | |||
parsed_sub_schema = None | |||
return subschemasubentry_dn, parsed_sub_schema |
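A short usage sketch for the subschema helpers above (the LDAP URL is a placeholder; any server exposing its subschema subentry will do): | |||
import ldap.schema | |||
# Fetch and parse the subschema subentry (placeholder URL). | |||
subschemasubentry_dn, schema = ldap.schema.urlfetch('ldap://ldap.example.com') | |||
# Look up a schema element by NAME or OID. | |||
person_oc = schema.get_obj(ldap.schema.ObjectClass, 'person') | |||
print(person_oc.must, person_oc.may) | |||
# All MUST/MAY attribute types for a set of object classes, | |||
# including everything inherited via SUP. | |||
must, may = schema.attribute_types(['person', 'organizationalPerson']) | |||
for oid, at_obj in must.items(): | |||
    print(oid, at_obj.names) | |||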
@@ -0,0 +1,80 @@ | |||
""" | |||
ldap.schema.tokenizer - Low-level parsing functions for schema element strings | |||
See https://www.python-ldap.org/ for details. | |||
""" | |||
import re | |||
TOKENS_FINDALL = re.compile( | |||
r"(\()" # opening parenthesis | |||
r"|" # or | |||
r"(\))" # closing parenthesis | |||
r"|" # or | |||
r"([^'$()\s]+)" # string of length >= 1 without '$() or whitespace | |||
r"|" # or | |||
r"('.*?'(?!\w))" # any string or empty string surrounded by single quotes | |||
# except if right quote is succeeded by alphanumeric char | |||
r"|" # or | |||
r"([^\s]+?)", # residue, all non-whitespace strings | |||
).findall | |||
def split_tokens(s): | |||
""" | |||
Returns list of syntax elements with quotes and spaces stripped. | |||
""" | |||
parts = [] | |||
parens = 0 | |||
for opar, cpar, unquoted, quoted, residue in TOKENS_FINDALL(s): | |||
if unquoted: | |||
parts.append(unquoted) | |||
elif quoted: | |||
parts.append(quoted[1:-1]) | |||
elif opar: | |||
parens += 1 | |||
parts.append(opar) | |||
elif cpar: | |||
parens -= 1 | |||
parts.append(cpar) | |||
elif residue == '$': | |||
if not parens: | |||
raise ValueError("'$' outside parenthesis in %r" % (s)) | |||
else: | |||
raise ValueError(residue, s) | |||
if parens: | |||
raise ValueError("Unbalanced parenthesis in %r" % (s)) | |||
return parts | |||
def extract_tokens(l,known_tokens): | |||
""" | |||
Returns dictionary of known tokens with all values | |||
""" | |||
assert l[0].strip()=="(" and l[-1].strip()==")",ValueError(l) | |||
result = {} | |||
result.update(known_tokens) | |||
i = 0 | |||
l_len = len(l) | |||
while i<l_len: | |||
if l[i] in result: | |||
token = l[i] | |||
i += 1 # Consume token | |||
if i<l_len: | |||
if l[i] in result: | |||
# non-valued | |||
result[token] = (()) | |||
elif l[i]=="(": | |||
# multi-valued | |||
i += 1 # Consume left parentheses | |||
start = i | |||
while i<l_len and l[i]!=")": | |||
i += 1 | |||
result[token] = tuple(filter(lambda v:v!='$',l[start:i])) | |||
i += 1 # Consume right parentheses | |||
else: | |||
# single-valued | |||
result[token] = l[i], | |||
i += 1 # Consume single value | |||
else: | |||
i += 1 # Consume unrecognized item | |||
return result |
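A small illustration of the two helpers above applied to a schema description string (the attribute type shown is just an example): | |||
from ldap.schema.tokenizer import split_tokens, extract_tokens | |||
desc = "( 2.5.4.4 NAME ( 'sn' 'surname' ) DESC 'RFC2256: last name' SUP name )" | |||
tokens = split_tokens(desc) | |||
# -> ['(', '2.5.4.4', 'NAME', '(', 'sn', 'surname', ')', 'DESC', | |||
#     'RFC2256: last name', 'SUP', 'name', ')'] | |||
parsed = extract_tokens(tokens, {'NAME': (), 'DESC': (None,), 'SUP': ()}) | |||
# parsed['NAME'] -> ('sn', 'surname') | |||
# parsed['DESC'] -> ('RFC2256: last name',) | |||
# parsed['SUP']  -> ('name',) | |||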
@@ -0,0 +1,536 @@ | |||
# -*- coding: utf-8 -*- | |||
""" | |||
ldap.syncrepl - for implementing syncrepl consumer (see RFC 4533) | |||
See https://www.python-ldap.org/ for project details. | |||
""" | |||
from uuid import UUID | |||
# Imports from pyasn1 | |||
from pyasn1.type import tag, namedtype, namedval, univ, constraint | |||
from pyasn1.codec.ber import encoder, decoder | |||
from ldap.pkginfo import __version__, __author__, __license__ | |||
from ldap.controls import RequestControl, ResponseControl, KNOWN_RESPONSE_CONTROLS | |||
__all__ = [ | |||
'SyncreplConsumer', | |||
] | |||
class SyncUUID(univ.OctetString): | |||
""" | |||
syncUUID ::= OCTET STRING (SIZE(16)) | |||
""" | |||
subtypeSpec = constraint.ValueSizeConstraint(16, 16) | |||
class SyncCookie(univ.OctetString): | |||
""" | |||
syncCookie ::= OCTET STRING | |||
""" | |||
class SyncRequestMode(univ.Enumerated): | |||
""" | |||
mode ENUMERATED { | |||
-- 0 unused | |||
refreshOnly (1), | |||
-- 2 reserved | |||
refreshAndPersist (3) | |||
}, | |||
""" | |||
namedValues = namedval.NamedValues( | |||
('refreshOnly', 1), | |||
('refreshAndPersist', 3) | |||
) | |||
subtypeSpec = univ.Enumerated.subtypeSpec + constraint.SingleValueConstraint(1, 3) | |||
class SyncRequestValue(univ.Sequence): | |||
""" | |||
syncRequestValue ::= SEQUENCE { | |||
mode ENUMERATED { | |||
-- 0 unused | |||
refreshOnly (1), | |||
-- 2 reserved | |||
refreshAndPersist (3) | |||
}, | |||
cookie syncCookie OPTIONAL, | |||
reloadHint BOOLEAN DEFAULT FALSE | |||
} | |||
""" | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('mode', SyncRequestMode()), | |||
namedtype.OptionalNamedType('cookie', SyncCookie()), | |||
namedtype.DefaultedNamedType('reloadHint', univ.Boolean(False)) | |||
) | |||
class SyncRequestControl(RequestControl): | |||
""" | |||
The Sync Request Control is an LDAP Control [RFC4511] where the | |||
controlType is the object identifier 1.3.6.1.4.1.4203.1.9.1.1 and the | |||
controlValue, an OCTET STRING, contains a BER-encoded | |||
syncRequestValue. The criticality field is either TRUE or FALSE. | |||
[..] | |||
The Sync Request Control is only applicable to the SearchRequest | |||
Message. | |||
""" | |||
controlType = '1.3.6.1.4.1.4203.1.9.1.1' | |||
def __init__(self, criticality=1, cookie=None, mode='refreshOnly', reloadHint=False): | |||
self.criticality = criticality | |||
self.cookie = cookie | |||
self.mode = mode | |||
self.reloadHint = reloadHint | |||
def encodeControlValue(self): | |||
rcv = SyncRequestValue() | |||
rcv.setComponentByName('mode', SyncRequestMode(self.mode)) | |||
if self.cookie is not None: | |||
rcv.setComponentByName('cookie', SyncCookie(self.cookie)) | |||
if self.reloadHint: | |||
rcv.setComponentByName('reloadHint', univ.Boolean(self.reloadHint)) | |||
return encoder.encode(rcv) | |||
class SyncStateOp(univ.Enumerated): | |||
""" | |||
state ENUMERATED { | |||
present (0), | |||
add (1), | |||
modify (2), | |||
delete (3) | |||
}, | |||
""" | |||
namedValues = namedval.NamedValues( | |||
('present', 0), | |||
('add', 1), | |||
('modify', 2), | |||
('delete', 3) | |||
) | |||
subtypeSpec = univ.Enumerated.subtypeSpec + constraint.SingleValueConstraint(0, 1, 2, 3) | |||
class SyncStateValue(univ.Sequence): | |||
""" | |||
syncStateValue ::= SEQUENCE { | |||
state ENUMERATED { | |||
present (0), | |||
add (1), | |||
modify (2), | |||
delete (3) | |||
}, | |||
entryUUID syncUUID, | |||
cookie syncCookie OPTIONAL | |||
} | |||
""" | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType('state', SyncStateOp()), | |||
namedtype.NamedType('entryUUID', SyncUUID()), | |||
namedtype.OptionalNamedType('cookie', SyncCookie()) | |||
) | |||
class SyncStateControl(ResponseControl): | |||
""" | |||
The Sync State Control is an LDAP Control [RFC4511] where the | |||
controlType is the object identifier 1.3.6.1.4.1.4203.1.9.1.2 and the | |||
controlValue, an OCTET STRING, contains a BER-encoded SyncStateValue. | |||
The criticality is FALSE. | |||
[..] | |||
The Sync State Control is only applicable to SearchResultEntry and | |||
SearchResultReference Messages. | |||
""" | |||
controlType = '1.3.6.1.4.1.4203.1.9.1.2' | |||
opnames = ('present', 'add', 'modify', 'delete') | |||
def decodeControlValue(self, encodedControlValue): | |||
d = decoder.decode(encodedControlValue, asn1Spec=SyncStateValue()) | |||
state = d[0].getComponentByName('state') | |||
uuid = UUID(bytes=bytes(d[0].getComponentByName('entryUUID'))) | |||
cookie = d[0].getComponentByName('cookie') | |||
if cookie is not None and cookie.hasValue(): | |||
self.cookie = str(cookie) | |||
else: | |||
self.cookie = None | |||
self.state = self.__class__.opnames[int(state)] | |||
self.entryUUID = str(uuid) | |||
KNOWN_RESPONSE_CONTROLS[SyncStateControl.controlType] = SyncStateControl | |||
class SyncDoneValue(univ.Sequence): | |||
""" | |||
syncDoneValue ::= SEQUENCE { | |||
cookie syncCookie OPTIONAL, | |||
refreshDeletes BOOLEAN DEFAULT FALSE | |||
} | |||
""" | |||
componentType = namedtype.NamedTypes( | |||
namedtype.OptionalNamedType('cookie', SyncCookie()), | |||
namedtype.DefaultedNamedType('refreshDeletes', univ.Boolean(False)) | |||
) | |||
class SyncDoneControl(ResponseControl): | |||
""" | |||
The Sync Done Control is an LDAP Control [RFC4511] where the | |||
controlType is the object identifier 1.3.6.1.4.1.4203.1.9.1.3 and the | |||
controlValue contains a BER-encoded syncDoneValue. The criticality | |||
is FALSE (and hence absent). | |||
[..] | |||
The Sync Done Control is only applicable to the SearchResultDone | |||
Message. | |||
""" | |||
controlType = '1.3.6.1.4.1.4203.1.9.1.3' | |||
def decodeControlValue(self, encodedControlValue): | |||
d = decoder.decode(encodedControlValue, asn1Spec=SyncDoneValue()) | |||
cookie = d[0].getComponentByName('cookie') | |||
if cookie.hasValue(): | |||
self.cookie = str(cookie) | |||
else: | |||
self.cookie = None | |||
refresh_deletes = d[0].getComponentByName('refreshDeletes') | |||
if refresh_deletes.hasValue(): | |||
self.refreshDeletes = bool(refresh_deletes) | |||
else: | |||
self.refreshDeletes = None | |||
KNOWN_RESPONSE_CONTROLS[SyncDoneControl.controlType] = SyncDoneControl | |||
class RefreshDelete(univ.Sequence): | |||
""" | |||
refreshDelete [1] SEQUENCE { | |||
cookie syncCookie OPTIONAL, | |||
refreshDone BOOLEAN DEFAULT TRUE | |||
}, | |||
""" | |||
componentType = namedtype.NamedTypes( | |||
namedtype.OptionalNamedType('cookie', SyncCookie()), | |||
namedtype.DefaultedNamedType('refreshDone', univ.Boolean(True)) | |||
) | |||
class RefreshPresent(univ.Sequence): | |||
""" | |||
refreshPresent [2] SEQUENCE { | |||
cookie syncCookie OPTIONAL, | |||
refreshDone BOOLEAN DEFAULT TRUE | |||
}, | |||
""" | |||
componentType = namedtype.NamedTypes( | |||
namedtype.OptionalNamedType('cookie', SyncCookie()), | |||
namedtype.DefaultedNamedType('refreshDone', univ.Boolean(True)) | |||
) | |||
class SyncUUIDs(univ.SetOf): | |||
""" | |||
syncUUIDs SET OF syncUUID | |||
""" | |||
componentType = SyncUUID() | |||
class SyncIdSet(univ.Sequence): | |||
""" | |||
syncIdSet [3] SEQUENCE { | |||
cookie syncCookie OPTIONAL, | |||
refreshDeletes BOOLEAN DEFAULT FALSE, | |||
syncUUIDs SET OF syncUUID | |||
} | |||
""" | |||
componentType = namedtype.NamedTypes( | |||
namedtype.OptionalNamedType('cookie', SyncCookie()), | |||
namedtype.DefaultedNamedType('refreshDeletes', univ.Boolean(False)), | |||
namedtype.NamedType('syncUUIDs', SyncUUIDs()) | |||
) | |||
class SyncInfoValue(univ.Choice): | |||
""" | |||
syncInfoValue ::= CHOICE { | |||
newcookie [0] syncCookie, | |||
refreshDelete [1] SEQUENCE { | |||
cookie syncCookie OPTIONAL, | |||
refreshDone BOOLEAN DEFAULT TRUE | |||
}, | |||
refreshPresent [2] SEQUENCE { | |||
cookie syncCookie OPTIONAL, | |||
refreshDone BOOLEAN DEFAULT TRUE | |||
}, | |||
syncIdSet [3] SEQUENCE { | |||
cookie syncCookie OPTIONAL, | |||
refreshDeletes BOOLEAN DEFAULT FALSE, | |||
syncUUIDs SET OF syncUUID | |||
} | |||
} | |||
""" | |||
componentType = namedtype.NamedTypes( | |||
namedtype.NamedType( | |||
'newcookie', | |||
SyncCookie().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0) | |||
) | |||
), | |||
namedtype.NamedType( | |||
'refreshDelete', | |||
RefreshDelete().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1) | |||
) | |||
), | |||
namedtype.NamedType( | |||
'refreshPresent', | |||
RefreshPresent().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2) | |||
) | |||
), | |||
namedtype.NamedType( | |||
'syncIdSet', | |||
SyncIdSet().subtype( | |||
implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3) | |||
) | |||
) | |||
) | |||
class SyncInfoMessage: | |||
""" | |||
The Sync Info Message is an LDAP Intermediate Response Message | |||
[RFC4511] where responseName is the object identifier | |||
1.3.6.1.4.1.4203.1.9.1.4 and responseValue contains a BER-encoded | |||
syncInfoValue. The criticality is FALSE (and hence absent). | |||
""" | |||
responseName = '1.3.6.1.4.1.4203.1.9.1.4' | |||
def __init__(self, encodedMessage): | |||
d = decoder.decode(encodedMessage, asn1Spec=SyncInfoValue()) | |||
self.newcookie = None | |||
self.refreshDelete = None | |||
self.refreshPresent = None | |||
self.syncIdSet = None | |||
for attr in ['newcookie', 'refreshDelete', 'refreshPresent', 'syncIdSet']: | |||
comp = d[0].getComponentByName(attr) | |||
if comp is not None and comp.hasValue(): | |||
if attr == 'newcookie': | |||
self.newcookie = str(comp) | |||
return | |||
val = {} | |||
cookie = comp.getComponentByName('cookie') | |||
if cookie.hasValue(): | |||
val['cookie'] = str(cookie) | |||
if attr.startswith('refresh'): | |||
val['refreshDone'] = bool(comp.getComponentByName('refreshDone')) | |||
elif attr == 'syncIdSet': | |||
uuids = [] | |||
ids = comp.getComponentByName('syncUUIDs') | |||
for i in range(len(ids)): | |||
uuid = UUID(bytes=bytes(ids.getComponentByPosition(i))) | |||
uuids.append(str(uuid)) | |||
val['syncUUIDs'] = uuids | |||
val['refreshDeletes'] = bool(comp.getComponentByName('refreshDeletes')) | |||
setattr(self, attr, val) | |||
return | |||
class SyncreplConsumer: | |||
""" | |||
SyncreplConsumer - LDAP syncrepl consumer object. | |||
""" | |||
def syncrepl_search(self, base, scope, mode='refreshOnly', cookie=None, **search_args): | |||
""" | |||
Starts syncrepl search operation. | |||
base, scope, and search_args are passed along to | |||
self.search_ext unmodified (aside from adding a Sync | |||
Request control to any serverctrls provided). | |||
mode provides syncrepl mode. Can be 'refreshOnly' | |||
to finish after synchronization, or | |||
'refreshAndPersist' to persist (continue to | |||
receive updates) after synchronization. | |||
cookie: an opaque value representing the replication | |||
state of the client. Subclasses should override | |||
the syncrepl_set_cookie() and syncrepl_get_cookie() | |||
methods to store the cookie appropriately, rather than | |||
passing it. | |||
Only a single syncrepl search may be active on a SyncreplConsumer | |||
object. Multiple concurrent syncrepl searches require multiple | |||
separate SyncreplConsumer objects and thus multiple connections | |||
(LDAPObject instances). | |||
""" | |||
if cookie is None: | |||
cookie = self.syncrepl_get_cookie() | |||
syncreq = SyncRequestControl(cookie=cookie, mode=mode) | |||
if 'serverctrls' in search_args: | |||
search_args['serverctrls'] += [syncreq] | |||
else: | |||
search_args['serverctrls'] = [syncreq] | |||
self.__refreshDone = False | |||
return self.search_ext(base, scope, **search_args) | |||
def syncrepl_poll(self, msgid=-1, timeout=None, all=0): | |||
""" | |||
polls for and processes responses to the syncrepl_search() operation. | |||
Returns False when operation finishes, True if it is in progress, or | |||
raises an exception on error. | |||
If timeout is specified, raises ldap.TIMEOUT in the event of a timeout. | |||
        If all is set to a nonzero value, syncrepl_poll() will return only when finished | |||
or when an exception is raised. | |||
""" | |||
while True: | |||
type, msg, mid, ctrls, n, v = self.result4( | |||
msgid=msgid, | |||
timeout=timeout, | |||
add_intermediates=1, | |||
add_ctrls=1, | |||
all=0, | |||
) | |||
if type == 101: | |||
# search result. This marks the end of a refreshOnly session. | |||
# look for a SyncDone control, save the cookie, and if necessary | |||
# delete non-present entries. | |||
for c in ctrls: | |||
if c.__class__.__name__ != 'SyncDoneControl': | |||
continue | |||
self.syncrepl_present(None, refreshDeletes=c.refreshDeletes) | |||
if c.cookie is not None: | |||
self.syncrepl_set_cookie(c.cookie) | |||
return False | |||
elif type == 100: | |||
# search entry with associated SyncState control | |||
for m in msg: | |||
dn, attrs, ctrls = m | |||
for c in ctrls: | |||
if c.__class__.__name__ != 'SyncStateControl': | |||
continue | |||
if c.state == 'present': | |||
self.syncrepl_present([c.entryUUID]) | |||
elif c.state == 'delete': | |||
self.syncrepl_delete([c.entryUUID]) | |||
else: | |||
self.syncrepl_entry(dn, attrs, c.entryUUID) | |||
if self.__refreshDone is False: | |||
self.syncrepl_present([c.entryUUID]) | |||
if c.cookie is not None: | |||
self.syncrepl_set_cookie(c.cookie) | |||
break | |||
elif type == 121: | |||
# Intermediate message. If it is a SyncInfoMessage, parse it | |||
for m in msg: | |||
rname, resp, ctrls = m | |||
if rname != SyncInfoMessage.responseName: | |||
continue | |||
sim = SyncInfoMessage(resp) | |||
if sim.newcookie is not None: | |||
self.syncrepl_set_cookie(sim.newcookie) | |||
elif sim.refreshPresent is not None: | |||
self.syncrepl_present(None, refreshDeletes=False) | |||
if 'cookie' in sim.refreshPresent: | |||
self.syncrepl_set_cookie(sim.refreshPresent['cookie']) | |||
if sim.refreshPresent['refreshDone']: | |||
self.__refreshDone = True | |||
self.syncrepl_refreshdone() | |||
elif sim.refreshDelete is not None: | |||
self.syncrepl_present(None, refreshDeletes=True) | |||
if 'cookie' in sim.refreshDelete: | |||
self.syncrepl_set_cookie(sim.refreshDelete['cookie']) | |||
if sim.refreshDelete['refreshDone']: | |||
self.__refreshDone = True | |||
self.syncrepl_refreshdone() | |||
elif sim.syncIdSet is not None: | |||
if sim.syncIdSet['refreshDeletes'] is True: | |||
self.syncrepl_delete(sim.syncIdSet['syncUUIDs']) | |||
else: | |||
self.syncrepl_present(sim.syncIdSet['syncUUIDs']) | |||
if 'cookie' in sim.syncIdSet: | |||
self.syncrepl_set_cookie(sim.syncIdSet['cookie']) | |||
if all == 0: | |||
return True | |||
# virtual methods -- subclass must override these to do useful work | |||
def syncrepl_set_cookie(self, cookie): | |||
""" | |||
Called by syncrepl_poll() to store a new cookie provided by the server. | |||
""" | |||
pass | |||
def syncrepl_get_cookie(self): | |||
""" | |||
Called by syncrepl_search() to retrieve the cookie stored by syncrepl_set_cookie() | |||
""" | |||
pass | |||
def syncrepl_present(self, uuids, refreshDeletes=False): | |||
""" | |||
Called by syncrepl_poll() whenever entry UUIDs are presented to the client. | |||
syncrepl_present() is given a list of entry UUIDs (uuids) and a flag | |||
(refreshDeletes) which indicates whether the server explicitly deleted | |||
non-present entries during the refresh operation. | |||
If called with a list of uuids, the syncrepl_present() implementation | |||
should record those uuids as present in the directory. | |||
If called with uuids set to None and refreshDeletes set to False, | |||
syncrepl_present() should delete all non-present entries from the local | |||
mirror, and reset the list of recorded uuids. | |||
If called with uuids set to None and refreshDeletes set to True, | |||
syncrepl_present() should reset the list of recorded uuids, without | |||
deleting any entries. | |||
""" | |||
pass | |||
def syncrepl_delete(self, uuids): | |||
""" | |||
Called by syncrepl_poll() to delete entries. A list | |||
of UUIDs of the entries to be deleted is given in the | |||
uuids parameter. | |||
""" | |||
pass | |||
def syncrepl_entry(self, dn, attrs, uuid): | |||
""" | |||
Called by syncrepl_poll() for any added or modified entries. | |||
The provided uuid is used to identify the provided entry in | |||
any future modification (including dn modification), deletion, | |||
and presentation operations. | |||
""" | |||
pass | |||
def syncrepl_refreshdone(self): | |||
""" | |||
Called by syncrepl_poll() between refresh and persist phase. | |||
It indicates that initial synchronization is done and persist phase | |||
follows. | |||
""" | |||
pass |
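# --- Illustrative only, not part of this module: a minimal sketch of a
# --- syncrepl consumer, assuming this is python-ldap's ldap.syncrepl and that
# --- ReconnectLDAPObject is available as usual. It keeps an in-memory mirror
# --- keyed by entryUUID and stores the cookie in a plain attribute instead of
# --- persistent storage.
#
#   import ldap
#   from ldap.ldapobject import ReconnectLDAPObject
#   from ldap.syncrepl import SyncreplConsumer
#
#   class InMemorySyncrepl(ReconnectLDAPObject, SyncreplConsumer):
#       def __init__(self, *args, **kwargs):
#           self._cookie = None
#           self._entries = {}      # entryUUID -> (dn, attrs)
#           self._present = set()   # UUIDs presented during the refresh phase
#           ReconnectLDAPObject.__init__(self, *args, **kwargs)
#
#       def syncrepl_get_cookie(self):
#           return self._cookie
#
#       def syncrepl_set_cookie(self, cookie):
#           self._cookie = cookie
#
#       def syncrepl_entry(self, dn, attrs, uuid):
#           self._entries[uuid] = (dn, attrs)   # added or modified entry
#
#       def syncrepl_delete(self, uuids):
#           for uuid in uuids:
#               self._entries.pop(uuid, None)
#
#       def syncrepl_present(self, uuids, refreshDeletes=False):
#           if uuids is not None:
#               self._present.update(uuids)
#           else:
#               if not refreshDeletes:
#                   # drop every entry the server did not present
#                   for uuid in set(self._entries) - self._present:
#                       self._entries.pop(uuid, None)
#               self._present = set()
#
#   # Usage sketch (URL, bind DN and search base are placeholders):
#   #   client = InMemorySyncrepl('ldap://ldap.example.com')
#   #   client.simple_bind_s('cn=admin,dc=example,dc=com', 'secret')
#   #   msgid = client.syncrepl_search('dc=example,dc=com', ldap.SCOPE_SUBTREE,
#   #                                  mode='refreshOnly')
#   #   while client.syncrepl_poll(msgid=msgid):
#   #       pass  # returns False once the refresh phase is complete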
@@ -0,0 +1 @@ | |||
pip |
@@ -0,0 +1,149 @@ | |||
Metadata-Version: 2.1 | |||
Name: ldap3 | |||
Version: 2.5 | |||
Summary: A strictly RFC 4510 conforming LDAP V3 pure Python client library | |||
Home-page: https://github.com/cannatag/ldap3 | |||
Author: Giovanni Cannata | |||
Author-email: cannatag@gmail.com | |||
License: LGPL v3 | |||
Keywords: python3 python2 ldap | |||
Platform: UNKNOWN | |||
Classifier: Development Status :: 5 - Production/Stable | |||
Classifier: Intended Audience :: Developers | |||
Classifier: Intended Audience :: System Administrators | |||
Classifier: Operating System :: MacOS :: MacOS X | |||
Classifier: Operating System :: Microsoft :: Windows | |||
Classifier: Operating System :: POSIX :: Linux | |||
Classifier: License :: OSI Approved :: GNU Lesser General Public License v3 (LGPLv3) | |||
Classifier: Programming Language :: Python | |||
Classifier: Programming Language :: Python :: 2 | |||
Classifier: Programming Language :: Python :: 3 | |||
Classifier: Topic :: Software Development :: Libraries :: Python Modules | |||
Classifier: Topic :: System :: Systems Administration :: Authentication/Directory :: LDAP | |||
Requires-Dist: pyasn1 (>=0.1.8) | |||
LDAP3 | |||
===== | |||
.. image:: https://img.shields.io/pypi/v/ldap3.svg | |||
:target: https://pypi.python.org/pypi/ldap3/ | |||
:alt: Latest Version | |||
.. image:: https://img.shields.io/pypi/l/ldap3.svg | |||
:target: https://pypi.python.org/pypi/ldap3/ | |||
:alt: License | |||
.. image:: https://img.shields.io/travis/cannatag/ldap3/master.svg | |||
:target: https://travis-ci.org/cannatag/ldap3 | |||
:alt: TRAVIS-CI build status for master branch | |||
ldap3 is a strictly RFC 4510 conforming **LDAP V3 pure Python client** library. The same codebase runs in Python 2, Python 3, PyPy and PyPy3. | |||
Version 2 warning | |||
----------------- | |||
In version 2 of ldap3 some default values have been changed and the ldap3 namespace has been decluttered, removing redundant | |||
constants (look at the changelog for details). Also, the result code constants were moved to ldap3.core.results and the ldap3 custom exceptions | |||
were stored in ldap3.core.exceptions. If you experience errors in your existing code you should rearrange the import statements or explicitly | |||
set the defaults to their former values. | |||
A more pythonic LDAP | |||
-------------------- | |||
LDAP operations can look clumsy and hard to use because they reflect the old idea that time-consuming work should be performed client-side
so as not to hog the server with heavy processing. To alleviate this, ldap3 includes a fully functional **Abstraction Layer** that lets you
interact with the LDAP server in a modern and *pythonic* way. With the Abstraction Layer you don't need to issue any LDAP operation directly.
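As an illustration only (the server name, credentials and DNs below are placeholders, not part of the library), a read through the Abstraction Layer might look like this::

    from ldap3 import Server, Connection, ObjectDef, Reader

    server = Server('ldap.example.com')
    conn = Connection(server, 'cn=admin,dc=example,dc=com', 'secret', auto_bind=True)
    person = ObjectDef('inetOrgPerson', conn)   # attribute definitions come from the server schema
    people = Reader(conn, person, 'ou=people,dc=example,dc=com')
    people.search()
    for entry in people:
        print(entry.cn, entry.mail)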
Home Page | |||
--------- | |||
Project home page is https://github.com/cannatag/ldap3 | |||
Documentation | |||
------------- | |||
Documentation is available at http://ldap3.readthedocs.io | |||
License | |||
------- | |||
The ldap3 project is open source software released under the **LGPL v3 license**. | |||
Copyright 2013 - 2018 Giovanni Cannata | |||
PEP8 Compliance | |||
--------------- | |||
ldap3 is PEP8 compliant, except for line length. | |||
Download | |||
-------- | |||
Package download is available at https://pypi.python.org/pypi/ldap3. | |||
Install | |||
------- | |||
Install with **pip install ldap3** | |||
Git repository | |||
-------------- | |||
You can download the latest source at https://github.com/cannatag/ldap3 | |||
Continuous integration | |||
---------------------- | |||
Continuous integration for testing is at https://travis-ci.org/cannatag/ldap3 | |||
Support | |||
------- | |||
You can submit support tickets on https://github.com/cannatag/ldap3/issues/new | |||
You can submit pull requests on the **dev** branch at https://github.com/cannatag/ldap3/tree/dev
Thanks to | |||
--------- | |||
* **Ilya Etingof**, the author of the *pyasn1* package for his excellent work and support. | |||
* **Mark Lutz** for his *Learning Python* and *Programming Python* excellent books series and **John Goerzen** and **Brandon Rhodes** for their book *Foundations of Python Network Programming*. These books are wonderful tools for learning Python and this project owes a lot to them. | |||
* **JetBrains** for donating to this project the Open Source license of *PyCharm Professional*. | |||
* **GitHub** for providing the *free source repository space and the tools* I use to develop this project. | |||
* The **FreeIPA** team for letting me use their demo LDAP server in the ldap3 tutorial. | |||
Contact me | |||
---------- | |||
For information and suggestions you can contact me at cannatag@gmail.com. You can also open a support ticket on https://github.com/cannatag/ldap3/issues/new | |||
Donate | |||
------ | |||
If you want to keep this project up and running you can send me an Amazon gift card. I will use it to improve my skills in information and communication technology.
Changelog | |||
--------- | |||
Updated changelog at https://ldap3.readthedocs.io/changelog.html | |||
@@ -0,0 +1,215 @@ | |||
ldap3/__init__.py,sha256=y8Bwq-3LFZvNnq_8B9IsGWFCG4AK2t08lGHWfTqgXP0,4129 | |||
ldap3/version.py,sha256=ommyVm3ssFqU9cXOvdU1_GEOR580YlL3-sW0yvWTPBY,684 | |||
ldap3/abstract/__init__.py,sha256=SjLzFchn_GXzh8piUYQElcRHCg3sUBRr5qB0me7w6d8,2166 | |||
ldap3/abstract/attrDef.py,sha256=_KnBfzdDtI4HXLGXcffRx5Ca1wsYqwC_-ylDDoso1qc,4983 | |||
ldap3/abstract/attribute.py,sha256=sMghD84wu-JvX-CNRcXGSzKnCfNN_q5jkH1mnOs3QOY,12598 | |||
ldap3/abstract/cursor.py,sha256=oi4g2ExXySrcDN3i-JUNRB6IjTYCxGNta61iRiS5Dig,44095 | |||
ldap3/abstract/entry.py,sha256=rfoMOvDGjCvvDi6V-W5RmzLs4PoHw9PwOOBGd9BwdBo,34312 | |||
ldap3/abstract/objectDef.py,sha256=MBUd0W3GjeuKLRtW39oiY3VbZ6LiYLzJNVMqQGTKxws,11808 | |||
ldap3/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
ldap3/core/connection.py,sha256=HoR68BgpqMVRswcgf28GQgWCy3i99JFxobPFDTYX4tk,77348 | |||
ldap3/core/exceptions.py,sha256=1Thc7Am0KwRgmxDYNiBVMjjFeaOfTOy4Pc5if2C-Izc,17059 | |||
ldap3/core/pooling.py,sha256=S43y_B23EFJT3O56XUTH3z1qY_Msoor2xGuD-eEMaOw,14131 | |||
ldap3/core/results.py,sha256=GORuTTO7jIenzNO_8xht4jz6ya1tcMWkwRky6yV4Pqg,5508 | |||
ldap3/core/server.py,sha256=WgO629ZOFhJW9ZK8TXk8I_5kiwYOrTWgxymwGdYx2jE,28554 | |||
ldap3/core/timezone.py,sha256=Q2juioasABK3X5x2kJNxNfBxbqMx0S7EG6n0JR3qjPA,1620 | |||
ldap3/core/tls.py,sha256=QhVwawW2MBpDE0hYYT6FY7kMzH5Hvttb3xOtoWY3tMs,15497 | |||
ldap3/core/usage.py,sha256=6ZR81aSmt5xsF5f5ychtWyR5ByFcOkpFnm2MWAFnpcY,10690 | |||
ldap3/extend/__init__.py,sha256=mMQcu3Bcuh3uVZ0xdDo4vWsEKGfkYPY2vLKeD3Hq53w,12677 | |||
ldap3/extend/operation.py,sha256=OMg9Cfg6CRhSQ-H6zuZ0U-twaQYUWt6dzcq8ja11yUg,3734 | |||
ldap3/extend/microsoft/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
ldap3/extend/microsoft/addMembersToGroups.py,sha256=DnQe2PTuJ7jzIHGo06qDYOtut8PRyoitJoKX5XvqeUs,3392 | |||
ldap3/extend/microsoft/dirSync.py,sha256=K7ZqGhn0xX9rvbnxvwAwhvHlKDQ_gHlSweZuStnzhPw,4108 | |||
ldap3/extend/microsoft/modifyPassword.py,sha256=KwI49Pv3tX2Bc1BzDKMb7VVtE5m2mMEk9rNJ27kob1s,3058 | |||
ldap3/extend/microsoft/removeMembersFromGroups.py,sha256=hUeE5wQE9O-vZ0107A0iTPxF-fg7Y1K9bthHAvuzNYE,3702 | |||
ldap3/extend/microsoft/unlockAccount.py,sha256=rM9JEZSk4xiL6KBHzVLmaBrHnwgLAX8gDyNN1cuaJeY,2104 | |||
ldap3/extend/novell/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
ldap3/extend/novell/addMembersToGroups.py,sha256=f7tH7wjnheJlJ0C24hhLnF9rWICPxBUwIer5KCUgC9o,7593 | |||
ldap3/extend/novell/checkGroupsMemberships.py,sha256=kVCUzR2nLsJ2Oj1HKv7XUKU9mIVeEBUZIZTcPrRILvM,7730 | |||
ldap3/extend/novell/endTransaction.py,sha256=XQx8OXHfe7c69_Gy6J_B1BbPsd6xDKfreek2ZwjrOd4,2252 | |||
ldap3/extend/novell/getBindDn.py,sha256=fZodgFsb4StlbQqVew0hxgxI2Lq2rrgPHDUlvx8oo2o,1422 | |||
ldap3/extend/novell/listReplicas.py,sha256=9J57u02qZb1dWYLQoTLyysAQ3v-LqQrSYtGCc5ipnqo,1856 | |||
ldap3/extend/novell/nmasGetUniversalPassword.py,sha256=_wtmWazGkVGPMNyq3K03CtYMFwhcRD8StrYVsxhFojs,2225 | |||
ldap3/extend/novell/nmasSetUniversalPassword.py,sha256=tj27EA9ureYZypk8J8RXt6lIpWavE7B68DtQQewA7_I,2077 | |||
ldap3/extend/novell/partition_entry_count.py,sha256=3MPDjYelnufnS-Z8GNnJQwAcIRR8jqx5jWs2wTCe51I,2077 | |||
ldap3/extend/novell/removeMembersFromGroups.py,sha256=IY1lZROZv6h9iq_SajAnhhl7lQdOAW-2fq7cKHIX5AQ,7683 | |||
ldap3/extend/novell/replicaInfo.py,sha256=FqloA0AAYldUir2qBWYuWd5IkhljeTAD9upAOcg0Ma0,3391 | |||
ldap3/extend/novell/startTransaction.py,sha256=8aCHtIoVm1a6T9gT3JVHdQTQaNzWlI4zJwy1RuQpWgU,2293 | |||
ldap3/extend/standard/PagedSearch.py,sha256=xAiDnnnPsSGuiYyR75K135vPTGE8xciMbVxel2ZaUZI,5262 | |||
ldap3/extend/standard/PersistentSearch.py,sha256=__5zYtYCjK-BahCoKcVE4K5aRsAkJa72NEoJlvwIxzQ,4832 | |||
ldap3/extend/standard/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
ldap3/extend/standard/modifyPassword.py,sha256=2AW-kLW6x0d2kRLxC12-U9tAkr7sBaPBI1oOSkM53Lg,3516 | |||
ldap3/extend/standard/whoAmI.py,sha256=DVz_CBR_Aa5wReFENbi25Jwa1W8drkOP01dvB9N2OZY,1389 | |||
ldap3/operation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
ldap3/operation/abandon.py,sha256=KbZZh8mEiLiI74pzmsoAGgdXOAYwSAjkzYqO7qUBS_0,1139 | |||
ldap3/operation/add.py,sha256=yf2Vk8DXPCgmUvZsOGzDTP2n5DZdN27jv-m2VebAa1w,2983 | |||
ldap3/operation/bind.py,sha256=zL2WvoGNxPFc_N5vaFsXbCrUPc9_lN-YiAic1fYSp3I,7722 | |||
ldap3/operation/compare.py,sha256=wb95kHzCSW2yzyqKZq069KOp4Z9HJPlXbL4-lQKwFZY,2467 | |||
ldap3/operation/delete.py,sha256=39KVqto4SN1RfTPn5sviQbRshMegkRMtmO8TjwtgE-k,1527 | |||
ldap3/operation/extended.py,sha256=iiSk3rJc9StRDH4MlWUsqYWDJ8AEyjVIobnrsgAHWIM,4861 | |||
ldap3/operation/modify.py,sha256=lorJRAkpYJ7eUjUUlrY75ViIKRby0CvEh2TO9mD0nmg,3927 | |||
ldap3/operation/modifyDn.py,sha256=wUYCLQHY9FQH_Ez_Zr6DVto_GYbp2VaXMR3Nf-NQkQE,2358 | |||
ldap3/operation/search.py,sha256=6fjlO7YrLgR6xmxPJ9aQIBrxoaPy6-wtL17rtGCOFzU,28048 | |||
ldap3/operation/unbind.py,sha256=H5vJQONvIaeHG9hTVSdeHpH6JiYzBQ_WHndAGDlPaFo,1012 | |||
ldap3/protocol/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
ldap3/protocol/controls.py,sha256=rX5fh32rXRB8WB7iWUFdxGo2D1UEJRBGcQonpREDO2I,1392 | |||
ldap3/protocol/convert.py,sha256=I0Wii14CeUXFMvezitGOkHqUlB4gYUMa34nubhqXfKU,9456 | |||
ldap3/protocol/microsoft.py,sha256=FYmfIbxRUsmh6acWo8rr3JHdjJquemW4l6p5cEiWXtk,5429 | |||
ldap3/protocol/novell.py,sha256=UvKadtYTaFWyTZg4j5NYA9NY-kNNbndNI3wiTpDtTQk,5157 | |||
ldap3/protocol/oid.py,sha256=3ZAxWr36Uv-KTQN56nPzVSTnOq0v6T-qkPYRW7UgFhQ,127654 | |||
ldap3/protocol/persistentSearch.py,sha256=F-po7N8e1Fx5H7EPSCm6-a6RGlMO4S-p3A7JFzscUQk,3177 | |||
ldap3/protocol/rfc2696.py,sha256=92n3hvSWbS0l5t6uJmQBC04nx3d6IixOk35_O8kcU1c,2267 | |||
ldap3/protocol/rfc2849.py,sha256=Mh_s5A89okrpX1mJbgFYV2dRGlleXRhfbjwKxI5LhUk,10477 | |||
ldap3/protocol/rfc3062.py,sha256=agvRijIdem8WNQO7C_nViuDCp1j2J3E-Cr_u2S-rC4k,2955 | |||
ldap3/protocol/rfc4511.py,sha256=wwUqQdQVRM3ffojGWzvqS3-5z4ARThl0o54Gnv3JotQ,42545 | |||
ldap3/protocol/rfc4512.py,sha256=jv0J4HQUijFPI8bZREoGJqFv3Albbu6ppkJIk-_xjFo,38737 | |||
ldap3/protocol/rfc4527.py,sha256=6uIuSzw2dNkJaCtXw3W0axUOVhnaw94yaCmIG4s21Uc,2078 | |||
ldap3/protocol/formatters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
ldap3/protocol/formatters/formatters.py,sha256=25HckFVfyqb8cb7TqYQkCT5EXdDgn6pGl7_HWeyVkoQ,15162 | |||
ldap3/protocol/formatters/standard.py,sha256=7Hlv3Lypt9Q1a6Y5mDx7hGrOJvaUN6NPGiTSxx4qQaI,14954 | |||
ldap3/protocol/formatters/validators.py,sha256=0B9UQXTT8Zj7IDmHQfATmxsJbhg1fIInKrnIRkGOGCE,14704 | |||
ldap3/protocol/sasl/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
ldap3/protocol/sasl/digestMd5.py,sha256=01nkAj3MfL-NRvOoo2qmDS7v-hh4s8771IMkBu_3dx8,5382 | |||
ldap3/protocol/sasl/external.py,sha256=0L_Gqc6FcL9KNFYcgWjuHeXubgISvfKgK3hzm2v3mAA,1051 | |||
ldap3/protocol/sasl/kerberos.py,sha256=EtbW5Z_WA1i031dN8iYTfNTUuV8KocHANS4DRiehRr4,5038 | |||
ldap3/protocol/sasl/plain.py,sha256=1jTwPbkmqtLMzG2uxuX1WBWR25DMM_MOxY6m-qFxkwU,2235 | |||
ldap3/protocol/sasl/sasl.py,sha256=0NxB_y1m24HMTsmPTXL3-EVUfjjcPu2KZ4dS9QKFmZM,7309 | |||
ldap3/protocol/schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
ldap3/protocol/schemas/ad2012R2.py,sha256=0OKiRtDlt7eMuDtLb-2BLmZcOUmJ-X0kgaZuxDLvU9o,333672 | |||
ldap3/protocol/schemas/ds389.py,sha256=pJvqOKjZpD12fNtxdOylwHDzRvwNLlvqta6tsx6FbHU,310500 | |||
ldap3/protocol/schemas/edir888.py,sha256=ZZv8LFCK-5a-Xy-X6nNktTCbiMtyq29mWBHgNWpu6Ek,178049 | |||
ldap3/protocol/schemas/slapd24.py,sha256=xzTijWlh2ClIqt2YiCY9WNA7ewMD9bqhH0OG0OkVSHU,129245 | |||
ldap3/strategy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
ldap3/strategy/asyncStream.py,sha256=rR-QwtFZubh_1oM9p_Zc9HtP-Cqp-8XNupsTynAHSz0,4460 | |||
ldap3/strategy/asynchronous.py,sha256=CkfnrzjEpHv8jxQ5B9uINgy4J36nXSnE5hcLDW3HntI,10715 | |||
ldap3/strategy/base.py,sha256=9NmLXgTJ6pV-oQwU4iZg0NSs5RZQ6CM7u38b6u2H8_o,48078 | |||
ldap3/strategy/ldifProducer.py,sha256=AV7PPwXUWuPYFf0TYFT2fJTFhu2CTXsjb1aZ7NN8sxw,5702 | |||
ldap3/strategy/mockAsync.py,sha256=rjSjGUm6pHCzvRwafDfcNwPFjROTYi0kgp8cUgfNSCc,10259 | |||
ldap3/strategy/mockBase.py,sha256=hA9GG0UOAoDWOAfHbq49xhAF9v2dS0Gjn4SHsANvgkE,44889 | |||
ldap3/strategy/mockSync.py,sha256=2xRerRhLnABNN7M7EvcsgDsfMnZn5897Vgvel2RGFM8,7315 | |||
ldap3/strategy/restartable.py,sha256=R5Hd8YJt_lUPkIi5JGezOnNsMzJRCwdcLf6Uu-vlnr0,12930 | |||
ldap3/strategy/reusable.py,sha256=J9DOpiWaxe6iGpf0vCHCK_k_L6AaL_1VqHNp-3T8E1A,25232 | |||
ldap3/strategy/sync.py,sha256=4_4ilTCBCl8MOPsKxADxr3i98SpnEujedogKD29dPdQ,10782 | |||
ldap3/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 | |||
ldap3/utils/asn1.py,sha256=kST57JjX7ZJ6InWv73vjsBKlgRpgSu_dCCc4suhwxsU,9304 | |||
ldap3/utils/ciDict.py,sha256=OfmgdAQeBahpdtceRMWqGkfS__JU1I5KdOhUzSHvtBA,7400 | |||
ldap3/utils/config.py,sha256=7gEkg7rO8FLR7xPS-CjySlFBMqOQ8XcCOEmkFEE5Nm4,13370 | |||
ldap3/utils/conv.py,sha256=TQnBDEZ8cuhQ-u9TIlPkYvTc_XkBXaWpFhqr55VvQmU,8267 | |||
ldap3/utils/dn.py,sha256=rgvNoKTOw1befL2uiU6NtQocJEJiZlXQ9q6qypvHSUM,13258 | |||
ldap3/utils/hashed.py,sha256=QegyN9OkmpG6u5ah968dshWLRCyixGeab6H0vs7dVHc,3575 | |||
ldap3/utils/log.py,sha256=PdJx5qNyS3Uihg77cdIh69LUYYmmUkE0TnxgJVy9crw,6738 | |||
ldap3/utils/ntlm.py,sha256=YAlYH2VQxLEVv4U9YeWwcwNDE_6Ts2aplklNc8wkqIQ,19719 | |||
ldap3/utils/ordDict.py,sha256=mmMzSklrAxwRZprA1Lj5K1D-eD-HLWVHxQVQD0NiPnQ,4251 | |||
ldap3/utils/repr.py,sha256=F5zOv9mcI27U_kOIYAG-1YnQZ7M7UMckRpcFOMB07S4,1700 | |||
ldap3/utils/tls_backport.py,sha256=-r1PvYoVUVbcqtjnK-O4jWbY4JEt4a7Mp5EO9q0Gkpc,5426 | |||
ldap3/utils/uri.py,sha256=wjwCiFNE5-FKxaaofYUUT8wOu43zeB-9FWCDrKTYF3Y,4900 | |||
ldap3-2.5.dist-info/METADATA,sha256=W_XYqIDOTbYKkzcfrRIOjkOZpymH7DXcEQF_njAWkQE,4662 | |||
ldap3-2.5.dist-info/RECORD,, | |||
ldap3-2.5.dist-info/WHEEL,sha256=saUSQBLOUjf5ACZdNkhQ0lB6XrHU-l4vpzxq_W1n_AY,116 | |||
ldap3-2.5.dist-info/top_level.txt,sha256=Zg1GRSTgLedl2RfLDLI0W0OaUFdYc0H1zzRbrK96JBw,6 | |||
ldap3-2.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 | |||
ldap3/abstract/__pycache__/attribute.cpython-36.pyc,, | |||
ldap3/abstract/__pycache__/entry.cpython-36.pyc,, | |||
ldap3/abstract/__pycache__/objectDef.cpython-36.pyc,, | |||
ldap3/abstract/__pycache__/cursor.cpython-36.pyc,, | |||
ldap3/abstract/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/abstract/__pycache__/attrDef.cpython-36.pyc,, | |||
ldap3/operation/__pycache__/modify.cpython-36.pyc,, | |||
ldap3/operation/__pycache__/delete.cpython-36.pyc,, | |||
ldap3/operation/__pycache__/unbind.cpython-36.pyc,, | |||
ldap3/operation/__pycache__/bind.cpython-36.pyc,, | |||
ldap3/operation/__pycache__/compare.cpython-36.pyc,, | |||
ldap3/operation/__pycache__/search.cpython-36.pyc,, | |||
ldap3/operation/__pycache__/abandon.cpython-36.pyc,, | |||
ldap3/operation/__pycache__/extended.cpython-36.pyc,, | |||
ldap3/operation/__pycache__/modifyDn.cpython-36.pyc,, | |||
ldap3/operation/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/operation/__pycache__/add.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/getBindDn.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/removeMembersFromGroups.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/replicaInfo.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/nmasSetUniversalPassword.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/addMembersToGroups.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/endTransaction.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/startTransaction.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/checkGroupsMemberships.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/partition_entry_count.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/nmasGetUniversalPassword.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/extend/novell/__pycache__/listReplicas.cpython-36.pyc,, | |||
ldap3/extend/microsoft/__pycache__/removeMembersFromGroups.cpython-36.pyc,, | |||
ldap3/extend/microsoft/__pycache__/addMembersToGroups.cpython-36.pyc,, | |||
ldap3/extend/microsoft/__pycache__/modifyPassword.cpython-36.pyc,, | |||
ldap3/extend/microsoft/__pycache__/dirSync.cpython-36.pyc,, | |||
ldap3/extend/microsoft/__pycache__/unlockAccount.cpython-36.pyc,, | |||
ldap3/extend/microsoft/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/extend/__pycache__/operation.cpython-36.pyc,, | |||
ldap3/extend/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/extend/standard/__pycache__/PagedSearch.cpython-36.pyc,, | |||
ldap3/extend/standard/__pycache__/whoAmI.cpython-36.pyc,, | |||
ldap3/extend/standard/__pycache__/modifyPassword.cpython-36.pyc,, | |||
ldap3/extend/standard/__pycache__/PersistentSearch.cpython-36.pyc,, | |||
ldap3/extend/standard/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/core/__pycache__/exceptions.cpython-36.pyc,, | |||
ldap3/core/__pycache__/results.cpython-36.pyc,, | |||
ldap3/core/__pycache__/usage.cpython-36.pyc,, | |||
ldap3/core/__pycache__/tls.cpython-36.pyc,, | |||
ldap3/core/__pycache__/pooling.cpython-36.pyc,, | |||
ldap3/core/__pycache__/connection.cpython-36.pyc,, | |||
ldap3/core/__pycache__/server.cpython-36.pyc,, | |||
ldap3/core/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/core/__pycache__/timezone.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/conv.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/repr.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/uri.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/dn.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/config.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/ciDict.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/asn1.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/log.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/hashed.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/tls_backport.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/ntlm.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/utils/__pycache__/ordDict.cpython-36.pyc,, | |||
ldap3/protocol/sasl/__pycache__/sasl.cpython-36.pyc,, | |||
ldap3/protocol/sasl/__pycache__/external.cpython-36.pyc,, | |||
ldap3/protocol/sasl/__pycache__/plain.cpython-36.pyc,, | |||
ldap3/protocol/sasl/__pycache__/digestMd5.cpython-36.pyc,, | |||
ldap3/protocol/sasl/__pycache__/kerberos.cpython-36.pyc,, | |||
ldap3/protocol/sasl/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/protocol/formatters/__pycache__/formatters.cpython-36.pyc,, | |||
ldap3/protocol/formatters/__pycache__/validators.cpython-36.pyc,, | |||
ldap3/protocol/formatters/__pycache__/standard.cpython-36.pyc,, | |||
ldap3/protocol/formatters/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/novell.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/rfc4512.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/rfc3062.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/oid.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/rfc2849.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/rfc4511.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/convert.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/controls.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/microsoft.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/persistentSearch.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/rfc4527.cpython-36.pyc,, | |||
ldap3/protocol/__pycache__/rfc2696.cpython-36.pyc,, | |||
ldap3/protocol/schemas/__pycache__/ds389.cpython-36.pyc,, | |||
ldap3/protocol/schemas/__pycache__/ad2012R2.cpython-36.pyc,, | |||
ldap3/protocol/schemas/__pycache__/slapd24.cpython-36.pyc,, | |||
ldap3/protocol/schemas/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/protocol/schemas/__pycache__/edir888.cpython-36.pyc,, | |||
ldap3/__pycache__/version.cpython-36.pyc,, | |||
ldap3/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/strategy/__pycache__/mockSync.cpython-36.pyc,, | |||
ldap3/strategy/__pycache__/asynchronous.cpython-36.pyc,, | |||
ldap3/strategy/__pycache__/restartable.cpython-36.pyc,, | |||
ldap3/strategy/__pycache__/ldifProducer.cpython-36.pyc,, | |||
ldap3/strategy/__pycache__/asyncStream.cpython-36.pyc,, | |||
ldap3/strategy/__pycache__/mockBase.cpython-36.pyc,, | |||
ldap3/strategy/__pycache__/mockAsync.cpython-36.pyc,, | |||
ldap3/strategy/__pycache__/base.cpython-36.pyc,, | |||
ldap3/strategy/__pycache__/reusable.cpython-36.pyc,, | |||
ldap3/strategy/__pycache__/__init__.cpython-36.pyc,, | |||
ldap3/strategy/__pycache__/sync.cpython-36.pyc,, |
@@ -0,0 +1,6 @@ | |||
Wheel-Version: 1.0 | |||
Generator: bdist_wheel (0.31.0) | |||
Root-Is-Purelib: true | |||
Tag: py2-none-any | |||
Tag: py3-none-any | |||
@@ -0,0 +1 @@ | |||
ldap3 |
@@ -0,0 +1,145 @@ | |||
""" | |||
""" | |||
# Created on 2013.05.15 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2013 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from types import GeneratorType | |||
# authentication | |||
ANONYMOUS = 'ANONYMOUS' | |||
SIMPLE = 'SIMPLE' | |||
SASL = 'SASL' | |||
NTLM = 'NTLM' | |||
# SASL MECHANISMS | |||
EXTERNAL = 'EXTERNAL' | |||
DIGEST_MD5 = 'DIGEST-MD5' | |||
KERBEROS = GSSAPI = 'GSSAPI' | |||
PLAIN = 'PLAIN' | |||
AUTO_BIND_NONE = 'NONE' # same as False | |||
AUTO_BIND_NO_TLS = 'NO_TLS' # same as True | |||
AUTO_BIND_TLS_BEFORE_BIND = 'TLS_BEFORE_BIND' | |||
AUTO_BIND_TLS_AFTER_BIND = 'TLS_AFTER_BIND' | |||
# server IP dual stack mode | |||
IP_SYSTEM_DEFAULT = 'IP_SYSTEM_DEFAULT' | |||
IP_V4_ONLY = 'IP_V4_ONLY' | |||
IP_V6_ONLY = 'IP_V6_ONLY' | |||
IP_V4_PREFERRED = 'IP_V4_PREFERRED' | |||
IP_V6_PREFERRED = 'IP_V6_PREFERRED' | |||
# search scope | |||
BASE = 'BASE' | |||
LEVEL = 'LEVEL' | |||
SUBTREE = 'SUBTREE' | |||
# search alias | |||
DEREF_NEVER = 'NEVER' | |||
DEREF_SEARCH = 'SEARCH' | |||
DEREF_BASE = 'FINDING_BASE' | |||
DEREF_ALWAYS = 'ALWAYS' | |||
# search attributes | |||
ALL_ATTRIBUTES = '*' | |||
NO_ATTRIBUTES = '1.1' # as per RFC 4511 | |||
ALL_OPERATIONAL_ATTRIBUTES = '+' # as per RFC 3673 | |||
# modify type | |||
MODIFY_ADD = 'MODIFY_ADD' | |||
MODIFY_DELETE = 'MODIFY_DELETE' | |||
MODIFY_REPLACE = 'MODIFY_REPLACE' | |||
MODIFY_INCREMENT = 'MODIFY_INCREMENT' | |||
# client strategies | |||
SYNC = 'SYNC' | |||
ASYNC = 'ASYNC' | |||
LDIF = 'LDIF' | |||
RESTARTABLE = 'RESTARTABLE' | |||
REUSABLE = 'REUSABLE' | |||
MOCK_SYNC = 'MOCK_SYNC' | |||
MOCK_ASYNC = 'MOCK_ASYNC' | |||
ASYNC_STREAM = 'ASYNC_STREAM' | |||
# get rootDSE info | |||
NONE = 'NO_INFO' | |||
DSA = 'DSA' | |||
SCHEMA = 'SCHEMA' | |||
ALL = 'ALL' | |||
OFFLINE_EDIR_8_8_8 = 'EDIR_8_8_8' | |||
OFFLINE_AD_2012_R2 = 'AD_2012_R2' | |||
OFFLINE_SLAPD_2_4 = 'SLAPD_2_4' | |||
OFFLINE_DS389_1_3_3 = 'DS389_1_3_3' | |||
# server pooling | |||
FIRST = 'FIRST' | |||
ROUND_ROBIN = 'ROUND_ROBIN' | |||
RANDOM = 'RANDOM' | |||
# Hashed password | |||
HASHED_NONE = 'PLAIN' | |||
HASHED_SHA = 'SHA' | |||
HASHED_SHA256 = 'SHA256' | |||
HASHED_SHA384 = 'SHA384' | |||
HASHED_SHA512 = 'SHA512' | |||
HASHED_MD5 = 'MD5' | |||
HASHED_SALTED_SHA = 'SALTED_SHA' | |||
HASHED_SALTED_SHA256 = 'SALTED_SHA256' | |||
HASHED_SALTED_SHA384 = 'SALTED_SHA384' | |||
HASHED_SALTED_SHA512 = 'SALTED_SHA512' | |||
HASHED_SALTED_MD5 = 'SALTED_MD5' | |||
if str is not bytes: # Python 3 | |||
NUMERIC_TYPES = (int, float) | |||
INTEGER_TYPES = (int, ) | |||
else: | |||
NUMERIC_TYPES = (int, long, float) | |||
INTEGER_TYPES = (int, long) | |||
# types for string and sequence | |||
if str is not bytes: # Python 3 | |||
STRING_TYPES = (str, ) | |||
    SEQUENCE_TYPES = (set, list, tuple, GeneratorType, type(dict().keys()))  # dict.keys() returns an iterable view object in Python 3
else: # Python 2 | |||
try: | |||
from future.types.newstr import newstr | |||
except ImportError: | |||
pass | |||
STRING_TYPES = (str, unicode) | |||
SEQUENCE_TYPES = (set, list, tuple, GeneratorType) | |||
# centralized imports # must be at the end of the __init__.py file | |||
from .version import __author__, __version__, __email__, __description__, __status__, __license__, __url__ | |||
from .utils.config import get_config_parameter, set_config_parameter | |||
from .core.server import Server | |||
from .core.connection import Connection | |||
from .core.tls import Tls | |||
from .core.pooling import ServerPool | |||
from .abstract.objectDef import ObjectDef | |||
from .abstract.attrDef import AttrDef | |||
from .abstract.attribute import Attribute, WritableAttribute, OperationalAttribute | |||
from .abstract.entry import Entry, WritableEntry | |||
from .abstract.cursor import Reader, Writer | |||
from .protocol.rfc4512 import DsaInfo, SchemaInfo |
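# Illustrative only (host and DNs are placeholders, not part of this module):
# a typical use of the constants defined above in a search request.
#
#   from ldap3 import Server, Connection, SUBTREE, ALL_ATTRIBUTES
#
#   conn = Connection(Server('ldap.example.com'), auto_bind=True)
#   conn.search('dc=example,dc=com', '(objectclass=person)',
#               search_scope=SUBTREE, attributes=ALL_ATTRIBUTES)
#   print(conn.entries)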
@@ -0,0 +1,50 @@ | |||
""" | |||
""" | |||
# Created on 2016.08.31 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
STATUS_INIT = 'Initialized' # The entry object is initialized | |||
STATUS_VIRTUAL = 'Virtual' # The entry is a new writable entry, still empty | |||
STATUS_MANDATORY_MISSING = 'Missing mandatory attributes' # The entry has some mandatory attributes missing | |||
STATUS_READ = 'Read' # The entry has been read | |||
STATUS_WRITABLE = 'Writable' # The entry has been made writable, still no changes | |||
STATUS_PENDING_CHANGES = 'Pending changes' # The entry has some changes to commit, mandatory attributes are present | |||
STATUS_COMMITTED = 'Committed'  # The entry changes have been committed
STATUS_READY_FOR_DELETION = 'Ready for deletion' # The entry is set to be deleted | |||
STATUS_READY_FOR_MOVING = 'Ready for moving' # The entry is set to be moved in the DIT | |||
STATUS_READY_FOR_RENAMING = 'Ready for renaming' # The entry is set to be renamed | |||
STATUS_DELETED = 'Deleted' # The entry has been deleted | |||
STATUSES = [STATUS_INIT, | |||
STATUS_VIRTUAL, | |||
STATUS_MANDATORY_MISSING, | |||
STATUS_READ, | |||
STATUS_WRITABLE, | |||
STATUS_PENDING_CHANGES, | |||
STATUS_COMMITTED, | |||
STATUS_READY_FOR_DELETION, | |||
STATUS_READY_FOR_MOVING, | |||
STATUS_READY_FOR_RENAMING, | |||
STATUS_DELETED] | |||
INITIAL_STATUSES = [STATUS_READ, STATUS_WRITABLE, STATUS_VIRTUAL] |
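# Illustrative lifecycle, inferred from how these statuses are used elsewhere in
# the abstraction layer (a sketch, not normative): an entry read from the server
# starts as STATUS_READ and becomes STATUS_WRITABLE when made writable; a brand
# new writable entry starts as STATUS_VIRTUAL; staging attribute changes moves a
# writable entry to STATUS_PENDING_CHANGES, and a successful commit moves it to
# STATUS_COMMITTED.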
@@ -0,0 +1,121 @@ | |||
""" | |||
""" | |||
# Created on 2014.01.11 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from os import linesep | |||
from .. import SEQUENCE_TYPES | |||
from ..core.exceptions import LDAPKeyError | |||
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED | |||
class AttrDef(object): | |||
"""Hold the definition of an attribute | |||
:param name: the real attribute name | |||
:type name: string | |||
    :param key: the friendly name to use in queries and when accessing the attribute, defaults to the real attribute name
:type key: string | |||
:param validate: called to check if the value in the query is valid, the callable is called with the value parameter | |||
:type validate: callable | |||
    :param pre_query: called to transform values before they are used in a query
:type pre_query: callable | |||
:param post_query: called to transform values returned by search | |||
:type post_query: callable | |||
:param default: value returned when the attribute is absent (defaults to NotImplemented to allow use of None as default) | |||
:type default: string, integer | |||
:param dereference_dn: reference to an ObjectDef instance. When the attribute value contains a dn it will be searched and substituted in the entry | |||
:type dereference_dn: ObjectDef | |||
:param description: custom attribute description | |||
:type description: string | |||
:param mandatory: specify if attribute is defined as mandatory in LDAP schema | |||
:type mandatory: boolean | |||
""" | |||
def __init__(self, name, key=None, validate=None, pre_query=None, post_query=None, default=NotImplemented, dereference_dn=None, description=None, mandatory=False, single_value=None, alias=None): | |||
self.name = name | |||
self.key = ''.join(key.split()) if key else name # key set to name if not present | |||
self.validate = validate | |||
self.pre_query = pre_query | |||
self.post_query = post_query | |||
self.default = default | |||
self.dereference_dn = dereference_dn | |||
self.description = description | |||
self.mandatory = mandatory | |||
self.single_value = single_value | |||
self.oid_info = None | |||
if not alias: | |||
self.other_names = None | |||
elif isinstance(alias, SEQUENCE_TYPES): # multiple aliases | |||
            self.other_names = set(alias)
else: # single alias | |||
self.other_names = set([alias]) # python 2 compatibility | |||
if log_enabled(BASIC): | |||
log(BASIC, 'instantiated AttrDef: <%r>', self) | |||
def __repr__(self): | |||
r = 'ATTR: ' + ', '.join([self.key] + list(self.other_names)) if self.other_names else self.key | |||
r += '' if self.name == self.key else ' [' + self.name + ']' | |||
r += '' if self.default is NotImplemented else ' - default: ' + str(self.default) | |||
r += '' if self.mandatory is None else ' - mandatory: ' + str(self.mandatory) | |||
r += '' if self.single_value is None else ' - single_value: ' + str(self.single_value) | |||
r += '' if not self.dereference_dn else ' - dereference_dn: ' + str(self.dereference_dn) | |||
r += '' if not self.description else ' - description: ' + str(self.description) | |||
if self.oid_info: | |||
for line in str(self.oid_info).split(linesep): | |||
r += linesep + ' ' + line | |||
return r | |||
def __str__(self): | |||
return self.__repr__() | |||
def __eq__(self, other): | |||
if isinstance(other, AttrDef): | |||
return self.key == other.key | |||
return False | |||
def __lt__(self, other): | |||
if isinstance(other, AttrDef): | |||
return self.key < other.key | |||
return False | |||
def __hash__(self): | |||
if self.key: | |||
return hash(self.key) | |||
else: | |||
return id(self) # unique for each instance | |||
def __setattr__(self, key, value): | |||
if hasattr(self, 'key') and key == 'key': # key cannot be changed because is being used for __hash__ | |||
error_message = 'key \'%s\' already set' % key | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPKeyError(error_message) | |||
else: | |||
object.__setattr__(self, key, value) |
@@ -0,0 +1,285 @@ | |||
""" | |||
""" | |||
# Created on 2014.01.06 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from os import linesep | |||
from .. import MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, SEQUENCE_TYPES | |||
from ..core.exceptions import LDAPCursorError | |||
from ..utils.repr import to_stdout_encoding | |||
from . import STATUS_PENDING_CHANGES, STATUS_VIRTUAL, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING | |||
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED | |||
# noinspection PyUnresolvedReferences | |||
class Attribute(object): | |||
"""Attribute/values object, it includes the search result (after post_query transformation) of each attribute in an entry | |||
Attribute object is read only | |||
- values: contain the processed attribute values | |||
- raw_values': contain the unprocessed attribute values | |||
""" | |||
def __init__(self, attr_def, entry, cursor): | |||
self.key = attr_def.key | |||
self.definition = attr_def | |||
self.values = [] | |||
self.raw_values = [] | |||
self.response = None | |||
self.entry = entry | |||
self.cursor = cursor | |||
other_names = [name for name in attr_def.oid_info.name if self.key.lower() != name.lower()] if attr_def.oid_info else None | |||
self.other_names = set(other_names) if other_names else None # self.other_names is None if there are no short names, else is a set of secondary names | |||
def __repr__(self): | |||
if len(self.values) == 1: | |||
r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) | |||
elif len(self.values) > 1: | |||
r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) | |||
filler = ' ' * (len(self.key) + 6) | |||
for value in self.values[1:]: | |||
r += linesep + filler + to_stdout_encoding(value) | |||
else: | |||
r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding('<no value>') | |||
return r | |||
def __str__(self): | |||
if len(self.values) == 1: | |||
return to_stdout_encoding(self.values[0]) | |||
else: | |||
return to_stdout_encoding(self.values) | |||
def __len__(self): | |||
return len(self.values) | |||
def __iter__(self): | |||
return self.values.__iter__() | |||
def __getitem__(self, item): | |||
return self.values[item] | |||
    def __eq__(self, other):
        try:
            return self.value == other
        except Exception:
            return False
def __ne__(self, other): | |||
return not self == other | |||
@property | |||
def value(self): | |||
""" | |||
:return: The single value or a list of values of the attribute. | |||
""" | |||
if not self.values: | |||
return None | |||
return self.values[0] if len(self.values) == 1 else self.values | |||
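# Illustrative only (entry and attribute names are placeholders): given an entry
# returned by a search, '.values' is always a list while '.value' collapses a
# single-valued attribute to a scalar.
#
#   person.sn.values    # -> ['Smith']
#   person.sn.value     # -> 'Smith'
#   person.mail.value   # -> None when the attribute has no values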
class OperationalAttribute(Attribute): | |||
"""Operational attribute/values object. Include the search result of an | |||
operational attribute in an entry | |||
OperationalAttribute object is read only | |||
- values: contains the processed attribute values | |||
- raw_values: contains the unprocessed attribute values | |||
It may not have an AttrDef | |||
""" | |||
def __repr__(self): | |||
if len(self.values) == 1: | |||
r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0]) | |||
elif len(self.values) > 1: | |||
r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0]) | |||
filler = ' ' * (len(self.key) + 6) | |||
for value in sorted(self.values[1:]): | |||
r += linesep + filler + to_stdout_encoding(value) | |||
else: | |||
r = '' | |||
return r | |||
class WritableAttribute(Attribute): | |||
def __repr__(self): | |||
filler = ' ' * (len(self.key) + 6) | |||
if len(self.values) == 1: | |||
r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) | |||
elif len(self.values) > 1: | |||
r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0]) | |||
for value in self.values[1:]: | |||
r += linesep + filler + to_stdout_encoding(value) | |||
else: | |||
r = to_stdout_encoding(self.key) + to_stdout_encoding(': <Virtual>') | |||
if self.definition.name in self.entry._changes: | |||
r += linesep + filler + 'CHANGES: ' + str(self.entry._changes[self.definition.name]) | |||
return r | |||
def __iadd__(self, other): | |||
self.add(other) | |||
return Ellipsis # hack to avoid calling set() in entry __setattr__ | |||
def __isub__(self, other): | |||
self.delete(other) | |||
return Ellipsis # hack to avoid calling set_value in entry __setattr__ | |||
def _update_changes(self, changes, remove_old=False): | |||
# checks for friendly key in AttrDef and uses the real attribute name | |||
if self.definition and self.definition.name: | |||
key = self.definition.name | |||
else: | |||
key = self.key | |||
if key not in self.entry._changes or remove_old: # remove old changes (for removing attribute) | |||
self.entry._changes[key] = [] | |||
self.entry._changes[key].append(changes) | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'updated changes <%r> for <%s> attribute in <%s> entry', changes, self.key, self.entry.entry_dn) | |||
self.entry._state.set_status(STATUS_PENDING_CHANGES) | |||
def add(self, values): | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'adding %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn) | |||
# new value for attribute to commit with a MODIFY_ADD | |||
if self.entry._state._initial_status == STATUS_VIRTUAL: | |||
error_message = 'cannot add an attribute value in a new entry' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]: | |||
error_message = self.entry.entry_status + ' - cannot add attributes' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
if values is None: | |||
error_message = 'value to add cannot be None' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
if values is not None: | |||
validated = self.definition.validate(values) # returns True, False or a value to substitute to the actual values | |||
if validated is False: | |||
                error_message = 'value \'%s\' not valid for attribute \'%s\'' % (values, self.key)
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
elif validated is not True: # a valid LDAP value equivalent to the actual values | |||
values = validated | |||
self._update_changes((MODIFY_ADD, values if isinstance(values, SEQUENCE_TYPES) else [values])) | |||
def set(self, values): | |||
# new value for attribute to commit with a MODIFY_REPLACE, old values are deleted | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'setting %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn) | |||
if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]: | |||
error_message = self.entry.entry_status + ' - cannot set attributes' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
if values is None: | |||
error_message = 'new value cannot be None' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
validated = self.definition.validate(values) # returns True, False or a value to substitute to the actual values | |||
if validated is False: | |||
            error_message = 'value \'%s\' not valid for attribute \'%s\'' % (values, self.key)
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
elif validated is not True: # a valid LDAP value equivalent to the actual values | |||
values = validated | |||
self._update_changes((MODIFY_REPLACE, values if isinstance(values, SEQUENCE_TYPES) else [values]), remove_old=True) | |||
def delete(self, values): | |||
# value for attribute to delete in commit with a MODIFY_DELETE | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'deleting %r from <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn) | |||
if self.entry._state._initial_status == STATUS_VIRTUAL: | |||
error_message = 'cannot delete an attribute value in a new entry' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]: | |||
error_message = self.entry.entry_status + ' - cannot delete attributes' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
if values is None: | |||
error_message = 'value to delete cannot be None' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
if not isinstance(values, SEQUENCE_TYPES): | |||
values = [values] | |||
for single_value in values: | |||
if single_value not in self.values: | |||
error_message = 'value \'%s\' not present in \'%s\'' % (single_value, ', '.join(self.values)) | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
self._update_changes((MODIFY_DELETE, values)) | |||
def remove(self): | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'removing <%s> attribute in <%s> entry', self.key, self.entry.entry_dn) | |||
if self.entry._state._initial_status == STATUS_VIRTUAL: | |||
error_message = 'cannot remove an attribute in a new entry' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]: | |||
error_message = self.entry.entry_status + ' - cannot remove attributes' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
self._update_changes((MODIFY_REPLACE, []), True) | |||
def discard(self): | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'discarding <%s> attribute in <%s> entry', self.key, self.entry.entry_dn) | |||
del self.entry._changes[self.key] | |||
if not self.entry._changes: | |||
self.entry._state.set_status(self.entry._state._initial_status) | |||
@property | |||
def virtual(self): | |||
return False if len(self.values) else True | |||
@property | |||
def changes(self): | |||
if self.key in self.entry._changes: | |||
return self.entry._changes[self.key] | |||
return None |
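# Illustrative only (attribute names are placeholders, assuming the Entry API
# defined elsewhere in this package): the typical WritableAttribute flow is to
# obtain a writable entry, stage changes, then commit them from the entry.
#
#   writable = entry.entry_writable()
#   writable.telephoneNumber.add('555-0100')   # staged as MODIFY_ADD
#   writable.sn.set('Smith')                   # staged as MODIFY_REPLACE
#   writable.entry_commit_changes()            # sends a single LDAP modify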
@@ -0,0 +1,904 @@ | |||
""" | |||
""" | |||
# Created on 2014.01.06 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from collections import namedtuple | |||
from copy import deepcopy | |||
from datetime import datetime | |||
from os import linesep | |||
from time import sleep | |||
from . import STATUS_VIRTUAL, STATUS_READ, STATUS_WRITABLE | |||
from .. import SUBTREE, LEVEL, DEREF_ALWAYS, DEREF_NEVER, BASE, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter | |||
from ..abstract import STATUS_PENDING_CHANGES | |||
from .attribute import Attribute, OperationalAttribute, WritableAttribute | |||
from .attrDef import AttrDef | |||
from .objectDef import ObjectDef | |||
from .entry import Entry, WritableEntry | |||
from ..core.exceptions import LDAPCursorError, LDAPObjectDereferenceError | |||
from ..core.results import RESULT_SUCCESS | |||
from ..utils.ciDict import CaseInsensitiveWithAliasDict | |||
from ..utils.dn import safe_dn, safe_rdn | |||
from ..utils.conv import to_raw | |||
from ..utils.config import get_config_parameter | |||
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED | |||
from ..protocol.oid import ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION, CLASS_AUXILIARY | |||
Operation = namedtuple('Operation', ('request', 'result', 'response')) | |||
def _ret_search_value(value): | |||
return value[0] + '=' + value[1:] if value[0] in '<>~' and value[1] != '=' else value | |||
def _create_query_dict(query_text): | |||
""" | |||
Create a dictionary with query key:value definitions | |||
query_text is a comma delimited key:value sequence | |||
""" | |||
query_dict = dict() | |||
if query_text: | |||
for arg_value_str in query_text.split(','): | |||
if ':' in arg_value_str: | |||
arg_value_list = arg_value_str.split(':') | |||
query_dict[arg_value_list[0].strip()] = arg_value_list[1].strip() | |||
return query_dict | |||
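# Illustrative only: with the comma-delimited key:value syntax described above,
#   _create_query_dict('CommonName: smith, Department: 7')
# returns {'CommonName': 'smith', 'Department': '7'}.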
class Cursor(object): | |||
# entry_class and attribute_class define the type of entry and attribute used by the cursor | |||
    # entry_initial_status defines the initial status of an entry
# entry_class = Entry, must be defined in subclasses | |||
# attribute_class = Attribute, must be defined in subclasses | |||
# entry_initial_status = STATUS, must be defined in subclasses | |||
def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None): | |||
conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')] | |||
self.connection = connection | |||
self.get_operational_attributes = get_operational_attributes | |||
if connection._deferred_bind or connection._deferred_open: # probably a lazy connection, tries to bind | |||
connection._fire_deferred() | |||
if isinstance(object_def, (STRING_TYPES, SEQUENCE_TYPES)): | |||
object_def = ObjectDef(object_def, connection.server.schema, auxiliary_class=auxiliary_class) | |||
self.definition = object_def | |||
if attributes: # checks if requested attributes are defined in ObjectDef | |||
not_defined_attributes = [] | |||
if isinstance(attributes, STRING_TYPES): | |||
attributes = [attributes] | |||
for attribute in attributes: | |||
if attribute not in self.definition._attributes and attribute.lower() not in conf_attributes_excluded_from_object_def: | |||
not_defined_attributes.append(attribute) | |||
if not_defined_attributes: | |||
                error_message = 'Attributes \'%s\' not in definition' % ', '.join(not_defined_attributes)
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
self.attributes = set(attributes) if attributes else set([attr.name for attr in self.definition]) | |||
self.controls = controls | |||
self.execution_time = None | |||
self.entries = [] | |||
self.schema = self.connection.server.schema | |||
self._do_not_reset = False # used for refreshing entry in entry_refresh() without removing all entries from the Cursor | |||
self._operation_history = list() # a list storing all the requests, results and responses for the last cursor operation | |||
def __repr__(self): | |||
r = 'CURSOR : ' + self.__class__.__name__ + linesep | |||
r += 'CONN : ' + str(self.connection) + linesep | |||
r += 'DEFS : ' + ', '.join(self.definition._object_class) | |||
if self.definition._auxiliary_class: | |||
r += ' [AUX: ' + ', '.join(self.definition._auxiliary_class) + ']' | |||
r += linesep | |||
# for attr_def in sorted(self.definition): | |||
# r += (attr_def.key if attr_def.key == attr_def.name else (attr_def.key + ' <' + attr_def.name + '>')) + ', ' | |||
# if r[-2] == ',': | |||
# r = r[:-2] | |||
# r += ']' + linesep | |||
if hasattr(self, 'attributes'): | |||
r += 'ATTRS : ' + repr(sorted(self.attributes)) + (' [OPERATIONAL]' if self.get_operational_attributes else '') + linesep | |||
if isinstance(self, Reader): | |||
if hasattr(self, 'base'): | |||
r += 'BASE : ' + repr(self.base) + (' [SUB]' if self.sub_tree else ' [LEVEL]') + linesep | |||
if hasattr(self, '_query') and self._query: | |||
r += 'QUERY : ' + repr(self._query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep | |||
if hasattr(self, 'validated_query') and self.validated_query: | |||
r += 'PARSED : ' + repr(self.validated_query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep | |||
if hasattr(self, 'query_filter') and self.query_filter: | |||
r += 'FILTER : ' + repr(self.query_filter) + linesep | |||
if hasattr(self, 'execution_time') and self.execution_time: | |||
r += 'ENTRIES: ' + str(len(self.entries)) | |||
r += ' [executed at: ' + str(self.execution_time.isoformat()) + ']' + linesep | |||
if self.failed: | |||
r += 'LAST OPERATION FAILED [' + str(len(self.errors)) + ' failure' + ('s' if len(self.errors) > 1 else '') + ' at operation' + ('s ' if len(self.errors) > 1 else ' ') + ', '.join([str(i) for i, error in enumerate(self.operations) if error.result['result'] != RESULT_SUCCESS]) + ']' | |||
return r | |||
def __str__(self): | |||
return self.__repr__() | |||
def __iter__(self): | |||
return self.entries.__iter__() | |||
def __getitem__(self, item): | |||
"""Return indexed item, if index is not found then try to sequentially search in DN of entries. | |||
If only one entry is found return it else raise a KeyError exception. The exception message | |||
includes the number of entries that matches, if less than 10 entries match then show the DNs | |||
in the exception message. | |||
""" | |||
try: | |||
return self.entries[item] | |||
except TypeError: | |||
pass | |||
if isinstance(item, STRING_TYPES): | |||
found = self.match_dn(item) | |||
if len(found) == 1: | |||
return found[0] | |||
elif len(found) > 1: | |||
error_message = 'Multiple entries found: %d entries match the text in dn' % len(found) + ('' if len(found) > 10 else (' [' + '; '.join([e.entry_dn for e in found]) + ']')) | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise KeyError(error_message) | |||
error_message = 'no entry found' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise KeyError(error_message) | |||
def __len__(self): | |||
return len(self.entries) | |||
if str is not bytes: # Python 3 | |||
def __bool__(self): # needed to make the cursor appears as existing in "if cursor:" even if there are no entries | |||
return True | |||
else: # Python 2 | |||
def __nonzero__(self): | |||
return True | |||
def _get_attributes(self, response, attr_defs, entry): | |||
"""Assign the result of the LDAP query to the Entry object dictionary. | |||
If the optional 'post_query' callable is present in the AttrDef it is called with each value of the attribute and the callable result is stored in the attribute. | |||
Returns the default value for missing attributes. | |||
        If the 'dereference_dn' in AttrDef is an ObjectDef then the attribute values are treated as distinguished names and the relevant entries are retrieved and stored in the attribute value.
""" | |||
conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX') | |||
conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')] | |||
attributes = CaseInsensitiveWithAliasDict() | |||
used_attribute_names = set() | |||
for attr in attr_defs: | |||
attr_def = attr_defs[attr] | |||
attribute_name = None | |||
for attr_name in response['attributes']: | |||
if attr_def.name.lower() == attr_name.lower(): | |||
attribute_name = attr_name | |||
break | |||
if attribute_name or attr_def.default is not NotImplemented: # attribute value found in result or default value present - NotImplemented allows use of None as default | |||
attribute = self.attribute_class(attr_def, entry, self) | |||
attribute.response = response | |||
attribute.raw_values = response['raw_attributes'][attribute_name] if attribute_name else None | |||
if attr_def.post_query and attr_def.name in response['attributes'] and response['raw_attributes'] != list(): | |||
attribute.values = attr_def.post_query(attr_def.key, response['attributes'][attribute_name]) | |||
else: | |||
if attr_def.default is NotImplemented or (attribute_name and response['raw_attributes'][attribute_name] != list()): | |||
attribute.values = response['attributes'][attribute_name] | |||
else: | |||
attribute.values = attr_def.default if isinstance(attr_def.default, SEQUENCE_TYPES) else [attr_def.default] | |||
if not isinstance(attribute.values, list): # force attribute values to list (if attribute is single-valued) | |||
attribute.values = [attribute.values] | |||
if attr_def.dereference_dn: # try to get object referenced in value | |||
if attribute.values: | |||
temp_reader = Reader(self.connection, attr_def.dereference_dn, base='', get_operational_attributes=self.get_operational_attributes, controls=self.controls) | |||
temp_values = [] | |||
for element in attribute.values: | |||
if entry.entry_dn != element: | |||
temp_values.append(temp_reader.search_object(element)) | |||
else: | |||
error_message = 'object %s is referencing itself in the \'%s\' attribute' % (entry.entry_dn, attribute.definition.name) | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPObjectDereferenceError(error_message) | |||
del temp_reader # remove the temporary Reader | |||
attribute.values = temp_values | |||
attributes[attribute.key] = attribute | |||
if attribute.other_names: | |||
attributes.set_alias(attribute.key, attribute.other_names) | |||
if attr_def.other_names: | |||
attributes.set_alias(attribute.key, attr_def.other_names) | |||
used_attribute_names.add(attribute_name) | |||
if self.attributes: | |||
used_attribute_names.update(self.attributes) | |||
for attribute_name in response['attributes']: | |||
if attribute_name not in used_attribute_names: | |||
operational_attribute = False | |||
# check if the type is an operational attribute | |||
if attribute_name in self.schema.attribute_types: | |||
if self.schema.attribute_types[attribute_name].no_user_modification or self.schema.attribute_types[attribute_name].usage in [ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION]: | |||
operational_attribute = True | |||
else: | |||
operational_attribute = True | |||
if not operational_attribute and attribute_name not in attr_defs and attribute_name.lower() not in conf_attributes_excluded_from_object_def: | |||
error_message = 'attribute \'%s\' not in object class \'%s\' for entry %s' % (attribute_name, ', '.join(entry.entry_definition._object_class), entry.entry_dn) | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
attribute = OperationalAttribute(AttrDef(conf_operational_attribute_prefix + attribute_name), entry, self) | |||
attribute.raw_values = response['raw_attributes'][attribute_name] | |||
attribute.values = response['attributes'][attribute_name] if isinstance(response['attributes'][attribute_name], SEQUENCE_TYPES) else [response['attributes'][attribute_name]] | |||
if (conf_operational_attribute_prefix + attribute_name) not in attributes: | |||
attributes[conf_operational_attribute_prefix + attribute_name] = attribute | |||
return attributes | |||
def match_dn(self, dn): | |||
"""Return entries with text in DN""" | |||
matched = [] | |||
for entry in self.entries: | |||
if dn.lower() in entry.entry_dn.lower(): | |||
matched.append(entry) | |||
return matched | |||
def match(self, attributes, value): | |||
"""Return entries with text in one of the specified attributes""" | |||
matched = [] | |||
if not isinstance(attributes, SEQUENCE_TYPES): | |||
attributes = [attributes] | |||
for entry in self.entries: | |||
found = False | |||
for attribute in attributes: | |||
if attribute in entry: | |||
for attr_value in entry[attribute].values: | |||
if hasattr(attr_value, 'lower') and hasattr(value, 'lower') and value.lower() in attr_value.lower(): | |||
found = True | |||
elif value == attr_value: | |||
found = True | |||
if found: | |||
matched.append(entry) | |||
break | |||
if found: | |||
break | |||
            # checks raw values, trying to convert the value to bytes
raw_value = to_raw(value) | |||
if isinstance(raw_value, (bytes, bytearray)): | |||
for attr_value in entry[attribute].raw_values: | |||
if hasattr(attr_value, 'lower') and hasattr(raw_value, 'lower') and raw_value.lower() in attr_value.lower(): | |||
found = True | |||
elif raw_value == attr_value: | |||
found = True | |||
if found: | |||
matched.append(entry) | |||
break | |||
if found: | |||
break | |||
return matched | |||
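    # Usage sketch (assumes a populated Reader 'r'); both helpers work on the entries
    # already present in the cursor, no new LDAP search is performed:
    #
    #     people_in_ou = r.match_dn('ou=people')               # substring match on the DN
    #     gmail_users = r.match(['mail', 'proxyAddresses'], 'gmail.com')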
def _create_entry(self, response): | |||
        if response['type'] != 'searchResEntry':
return None | |||
entry = self.entry_class(response['dn'], self) # define an Entry (writable or readonly), as specified in the cursor definition | |||
entry._state.attributes = self._get_attributes(response, self.definition._attributes, entry) | |||
entry._state.entry_raw_attributes = deepcopy(response['raw_attributes']) | |||
entry._state.response = response | |||
entry._state.read_time = datetime.now() | |||
entry._state.set_status(self.entry_initial_status) | |||
for attr in entry: # returns the whole attribute object | |||
entry.__dict__[attr.key] = attr | |||
return entry | |||
def _execute_query(self, query_scope, attributes): | |||
if not self.connection: | |||
error_message = 'no connection established' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
old_query_filter = None | |||
if query_scope == BASE: # requesting a single object so an always-valid filter is set | |||
if hasattr(self, 'query_filter'): # only Reader has a query filter | |||
old_query_filter = self.query_filter | |||
self.query_filter = '(objectclass=*)' | |||
else: | |||
self._create_query_filter() | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'executing query - base: %s - filter: %s - scope: %s for <%s>', self.base, self.query_filter, query_scope, self) | |||
with self.connection: | |||
result = self.connection.search(search_base=self.base, | |||
search_filter=self.query_filter, | |||
search_scope=query_scope, | |||
dereference_aliases=self.dereference_aliases, | |||
attributes=attributes if attributes else list(self.attributes), | |||
get_operational_attributes=self.get_operational_attributes, | |||
controls=self.controls) | |||
if not self.connection.strategy.sync: | |||
response, result, request = self.connection.get_response(result, get_request=True) | |||
else: | |||
response = self.connection.response | |||
result = self.connection.result | |||
request = self.connection.request | |||
self._store_operation_in_history(request, result, response) | |||
if self._do_not_reset: # trick to not remove entries when using _refresh() | |||
return self._create_entry(response[0]) | |||
self.entries = [] | |||
for r in response: | |||
entry = self._create_entry(r) | |||
if entry is not None: | |||
self.entries.append(entry) | |||
if 'objectClass' in entry: | |||
for object_class in entry.objectClass: | |||
if self.schema.object_classes[object_class].kind == CLASS_AUXILIARY and object_class not in self.definition._auxiliary_class: | |||
# add auxiliary class to object definition | |||
self.definition._auxiliary_class.append(object_class) | |||
self.definition._populate_attr_defs(object_class) | |||
self.execution_time = datetime.now() | |||
if old_query_filter: # requesting a single object so an always-valid filter is set | |||
self.query_filter = old_query_filter | |||
def remove(self, entry): | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'removing entry <%s> in <%s>', entry, self) | |||
self.entries.remove(entry) | |||
def _reset_history(self): | |||
self._operation_history = list() | |||
def _store_operation_in_history(self, request, result, response): | |||
self._operation_history.append(Operation(request, result, response)) | |||
@property | |||
def operations(self): | |||
return self._operation_history | |||
@property | |||
def errors(self): | |||
return [error for error in self._operation_history if error.result['result'] != RESULT_SUCCESS] | |||
@property | |||
def failed(self): | |||
if hasattr(self, '_operation_history'): | |||
return any([error.result['result'] != RESULT_SUCCESS for error in self._operation_history]) | |||
class Reader(Cursor): | |||
"""Reader object to perform searches: | |||
:param connection: the LDAP connection object to use | |||
:type connection: LDAPConnection | |||
:param object_def: the ObjectDef of the LDAP object returned | |||
:type object_def: ObjectDef | |||
:param query: the simplified query (will be transformed in an LDAP filter) | |||
:type query: str | |||
:param base: starting base of the search | |||
:type base: str | |||
    :param components_in_and: specify whether all assertions in the query must be satisfied (AND) or any of them (OR)
:type components_in_and: bool | |||
    :param sub_tree: specify whether the search must be performed at Single Level (False) or on the Whole SubTree (True)
:type sub_tree: bool | |||
:param get_operational_attributes: specify if operational attributes are returned or not | |||
:type get_operational_attributes: bool | |||
:param controls: controls to be used in search | |||
:type controls: tuple | |||
""" | |||
entry_class = Entry # entries are read_only | |||
attribute_class = Attribute # attributes are read_only | |||
entry_initial_status = STATUS_READ | |||
def __init__(self, connection, object_def, base, query='', components_in_and=True, sub_tree=True, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None): | |||
Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class) | |||
self._components_in_and = components_in_and | |||
self.sub_tree = sub_tree | |||
self._query = query | |||
self.base = base | |||
self.dereference_aliases = DEREF_ALWAYS | |||
self.validated_query = None | |||
self._query_dict = dict() | |||
self._validated_query_dict = dict() | |||
self.query_filter = None | |||
self.reset() | |||
if log_enabled(BASIC): | |||
log(BASIC, 'instantiated Reader Cursor: <%r>', self) | |||
@property | |||
def query(self): | |||
return self._query | |||
@query.setter | |||
def query(self, value): | |||
self._query = value | |||
self.reset() | |||
@property | |||
def components_in_and(self): | |||
return self._components_in_and | |||
@components_in_and.setter | |||
def components_in_and(self, value): | |||
self._components_in_and = value | |||
self.reset() | |||
def clear(self): | |||
"""Clear the Reader search parameters | |||
""" | |||
self.dereference_aliases = DEREF_ALWAYS | |||
self._reset_history() | |||
def reset(self): | |||
"""Clear all the Reader parameters | |||
""" | |||
self.clear() | |||
self.validated_query = None | |||
self._query_dict = dict() | |||
self._validated_query_dict = dict() | |||
self.execution_time = None | |||
self.query_filter = None | |||
self.entries = [] | |||
self._create_query_filter() | |||
def _validate_query(self): | |||
"""Processes the text query and verifies that the requested friendly names are in the Reader dictionary | |||
If the AttrDef has a 'validate' property the callable is executed and if it returns False an Exception is raised | |||
""" | |||
if not self._query_dict: | |||
self._query_dict = _create_query_dict(self._query) | |||
query = '' | |||
for d in sorted(self._query_dict): | |||
attr = d[1:] if d[0] in '&|' else d | |||
for attr_def in self.definition: | |||
if ''.join(attr.split()).lower() == attr_def.key.lower(): | |||
attr = attr_def.key | |||
break | |||
if attr in self.definition: | |||
vals = sorted(self._query_dict[d].split(';')) | |||
query += (d[0] + attr if d[0] in '&|' else attr) + ': ' | |||
for val in vals: | |||
val = val.strip() | |||
val_not = True if val[0] == '!' else False | |||
val_search_operator = '=' # default | |||
if val_not: | |||
if val[1:].lstrip()[0] not in '=<>~': | |||
value = val[1:].lstrip() | |||
else: | |||
val_search_operator = val[1:].lstrip()[0] | |||
value = val[1:].lstrip()[1:] | |||
else: | |||
if val[0] not in '=<>~': | |||
value = val.lstrip() | |||
else: | |||
val_search_operator = val[0] | |||
value = val[1:].lstrip() | |||
if self.definition[attr].validate: | |||
validated = self.definition[attr].validate(value) # returns True, False or a value to substitute to the actual values | |||
if validated is False: | |||
error_message = 'validation failed for attribute %s and value %s' % (d, val) | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
elif validated is not True: # a valid LDAP value equivalent to the actual values | |||
value = validated | |||
if val_not: | |||
query += '!' + val_search_operator + str(value) | |||
else: | |||
query += val_search_operator + str(value) | |||
query += ';' | |||
query = query[:-1] + ', ' | |||
else: | |||
error_message = 'attribute \'%s\' not in definition' % attr | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
self.validated_query = query[:-2] | |||
self._validated_query_dict = _create_query_dict(self.validated_query) | |||
def _create_query_filter(self): | |||
"""Converts the query dictionary to the filter text""" | |||
self.query_filter = '' | |||
if self.definition._object_class: | |||
self.query_filter += '(&' | |||
if isinstance(self.definition._object_class, SEQUENCE_TYPES) and len(self.definition._object_class) == 1: | |||
self.query_filter += '(objectClass=' + self.definition._object_class[0] + ')' | |||
elif isinstance(self.definition._object_class, SEQUENCE_TYPES): | |||
self.query_filter += '(&' | |||
for object_class in self.definition._object_class: | |||
self.query_filter += '(objectClass=' + object_class + ')' | |||
self.query_filter += ')' | |||
else: | |||
error_message = 'object class must be a string or a list' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
if self._query and self._query.startswith('(') and self._query.endswith(')'): # query is already an LDAP filter | |||
if 'objectclass' not in self._query.lower(): | |||
self.query_filter += self._query + ')' # if objectclass not in filter adds from definition | |||
else: | |||
self.query_filter = self._query | |||
return | |||
elif self._query: # if a simplified filter is present | |||
if not self.components_in_and: | |||
self.query_filter += '(|' | |||
elif not self.definition._object_class: | |||
self.query_filter += '(&' | |||
self._validate_query() | |||
attr_counter = 0 | |||
for attr in sorted(self._validated_query_dict): | |||
attr_counter += 1 | |||
                multi = ';' in self._validated_query_dict[attr]
vals = sorted(self._validated_query_dict[attr].split(';')) | |||
attr_def = self.definition[attr[1:]] if attr[0] in '&|' else self.definition[attr] | |||
if attr_def.pre_query: | |||
modvals = [] | |||
for val in vals: | |||
modvals.append(val[0] + attr_def.pre_query(attr_def.key, val[1:])) | |||
vals = modvals | |||
if multi: | |||
if attr[0] in '&|': | |||
self.query_filter += '(' + attr[0] | |||
else: | |||
self.query_filter += '(|' | |||
for val in vals: | |||
if val[0] == '!': | |||
self.query_filter += '(!(' + attr_def.name + _ret_search_value(val[1:]) + '))' | |||
else: | |||
self.query_filter += '(' + attr_def.name + _ret_search_value(val) + ')' | |||
if multi: | |||
self.query_filter += ')' | |||
if not self.components_in_and: | |||
self.query_filter += '))' | |||
else: | |||
self.query_filter += ')' | |||
if not self.definition._object_class and attr_counter == 1: # removes unneeded starting filter | |||
self.query_filter = self.query_filter[2: -1] | |||
if self.query_filter == '(|)' or self.query_filter == '(&)': # removes empty filter | |||
self.query_filter = '' | |||
else: # no query, remove unneeded leading (& | |||
self.query_filter = self.query_filter[2:] | |||
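    # Example of the conversion performed above (illustrative values only): with the
    # default components_in_and=True, the simplified query
    # 'cn: john*, department: sales; marketing' on an ObjectDef whose object class is
    # 'inetOrgPerson' produces an LDAP filter roughly equivalent to
    # '(&(objectClass=inetOrgPerson)(cn=john*)(|(department=marketing)(department=sales)))'
    # (component order may differ because attributes and values are processed in sorted order).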
def search(self, attributes=None): | |||
"""Perform the LDAP search | |||
:return: Entries found in search | |||
""" | |||
self.clear() | |||
query_scope = SUBTREE if self.sub_tree else LEVEL | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'performing search in <%s>', self) | |||
self._execute_query(query_scope, attributes) | |||
return self.entries | |||
def search_object(self, entry_dn=None, attributes=None): # base must be a single dn | |||
"""Perform the LDAP search operation SINGLE_OBJECT scope | |||
:return: Entry found in search | |||
""" | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'performing object search in <%s>', self) | |||
self.clear() | |||
if entry_dn: | |||
old_base = self.base | |||
self.base = entry_dn | |||
self._execute_query(BASE, attributes) | |||
self.base = old_base | |||
else: | |||
self._execute_query(BASE, attributes) | |||
return self.entries[0] if len(self.entries) > 0 else None | |||
def search_level(self, attributes=None): | |||
"""Perform the LDAP search operation with SINGLE_LEVEL scope | |||
:return: Entries found in search | |||
""" | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'performing single level search in <%s>', self) | |||
self.clear() | |||
self._execute_query(LEVEL, attributes) | |||
return self.entries | |||
def search_subtree(self, attributes=None): | |||
"""Perform the LDAP search operation WHOLE_SUBTREE scope | |||
:return: Entries found in search | |||
""" | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'performing whole subtree search in <%s>', self) | |||
self.clear() | |||
self._execute_query(SUBTREE, attributes) | |||
return self.entries | |||
def _entries_generator(self, responses): | |||
for response in responses: | |||
yield self._create_entry(response) | |||
def search_paged(self, paged_size, paged_criticality=True, generator=True, attributes=None): | |||
"""Perform a paged search, can be called as an Iterator | |||
:param attributes: optional attributes to search | |||
:param paged_size: number of entries returned in each search | |||
:type paged_size: int | |||
        :param paged_criticality: specify whether the server must refuse the search if it does not support paged searches
:type paged_criticality: bool | |||
:param generator: if True the paged searches are executed while generating the entries, | |||
            if False all the paged searches are executed before the entries are returned
:type generator: bool | |||
:return: Entries found in search | |||
""" | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'performing paged search in <%s> with paged size %s', self, str(paged_size)) | |||
if not self.connection: | |||
error_message = 'no connection established' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
self.clear() | |||
self._create_query_filter() | |||
self.entries = [] | |||
self.execution_time = datetime.now() | |||
response = self.connection.extend.standard.paged_search(search_base=self.base, | |||
search_filter=self.query_filter, | |||
search_scope=SUBTREE if self.sub_tree else LEVEL, | |||
dereference_aliases=self.dereference_aliases, | |||
attributes=attributes if attributes else self.attributes, | |||
get_operational_attributes=self.get_operational_attributes, | |||
controls=self.controls, | |||
paged_size=paged_size, | |||
paged_criticality=paged_criticality, | |||
generator=generator) | |||
if generator: | |||
return self._entries_generator(response) | |||
else: | |||
return list(self._entries_generator(response)) | |||
class Writer(Cursor): | |||
entry_class = WritableEntry | |||
attribute_class = WritableAttribute | |||
entry_initial_status = STATUS_WRITABLE | |||
@staticmethod | |||
def from_cursor(cursor, connection=None, object_def=None, custom_validator=None): | |||
if connection is None: | |||
connection = cursor.connection | |||
if object_def is None: | |||
object_def = cursor.definition | |||
writer = Writer(connection, object_def, attributes=cursor.attributes) | |||
for entry in cursor.entries: | |||
if isinstance(cursor, Reader): | |||
entry.entry_writable(object_def, writer, custom_validator=custom_validator) | |||
elif isinstance(cursor, Writer): | |||
pass | |||
else: | |||
error_message = 'unknown cursor type %s' % str(type(cursor)) | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s', error_message) | |||
raise LDAPCursorError(error_message) | |||
writer.execution_time = cursor.execution_time | |||
if log_enabled(BASIC): | |||
log(BASIC, 'instantiated Writer Cursor <%r> from cursor <%r>', writer, cursor) | |||
return writer | |||
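    # Typical use of from_cursor (assumes a populated Reader 'r'): every entry of the
    # Reader becomes writable in the new Writer and changes are then committed in bulk;
    # the attribute name below is an example only:
    #
    #     w = Writer.from_cursor(r)
    #     for entry in w:
    #         entry.departmentNumber = '42'
    #     w.commit()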
@staticmethod | |||
def from_response(connection, object_def, response=None): | |||
if response is None: | |||
if not connection.strategy.sync: | |||
error_message = 'with asynchronous strategies response must be specified' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s', error_message) | |||
raise LDAPCursorError(error_message) | |||
elif connection.response: | |||
response = connection.response | |||
else: | |||
error_message = 'response not present' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s', error_message) | |||
raise LDAPCursorError(error_message) | |||
writer = Writer(connection, object_def) | |||
for resp in response: | |||
if resp['type'] == 'searchResEntry': | |||
entry = writer._create_entry(resp) | |||
writer.entries.append(entry) | |||
if log_enabled(BASIC): | |||
log(BASIC, 'instantiated Writer Cursor <%r> from response', writer) | |||
return writer | |||
def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None): | |||
Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class) | |||
self.dereference_aliases = DEREF_NEVER | |||
if log_enabled(BASIC): | |||
log(BASIC, 'instantiated Writer Cursor: <%r>', self) | |||
def commit(self, refresh=True): | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'committed changes for <%s>', self) | |||
self._reset_history() | |||
successful = True | |||
for entry in self.entries: | |||
if not entry.entry_commit_changes(refresh=refresh, controls=self.controls, clear_history=False): | |||
successful = False | |||
self.execution_time = datetime.now() | |||
return successful | |||
def discard(self): | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'discarded changes for <%s>', self) | |||
for entry in self.entries: | |||
entry.entry_discard_changes() | |||
def _refresh_object(self, entry_dn, attributes=None, tries=4, seconds=2, controls=None): # base must be a single dn | |||
"""Performs the LDAP search operation SINGLE_OBJECT scope | |||
:return: Entry found in search | |||
""" | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'refreshing object <%s> for <%s>', entry_dn, self) | |||
if not self.connection: | |||
error_message = 'no connection established' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
response = [] | |||
with self.connection: | |||
counter = 0 | |||
while counter < tries: | |||
result = self.connection.search(search_base=entry_dn, | |||
search_filter='(objectclass=*)', | |||
search_scope=BASE, | |||
dereference_aliases=DEREF_NEVER, | |||
attributes=attributes if attributes else self.attributes, | |||
get_operational_attributes=self.get_operational_attributes, | |||
controls=controls) | |||
if not self.connection.strategy.sync: | |||
response, result, request = self.connection.get_response(result, get_request=True) | |||
else: | |||
response = self.connection.response | |||
result = self.connection.result | |||
request = self.connection.request | |||
if result['result'] in [RESULT_SUCCESS]: | |||
break | |||
sleep(seconds) | |||
counter += 1 | |||
self._store_operation_in_history(request, result, response) | |||
if len(response) == 1: | |||
return self._create_entry(response[0]) | |||
elif len(response) == 0: | |||
return None | |||
error_message = 'more than 1 entry returned for a single object search' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
def new(self, dn): | |||
if log_enabled(BASIC): | |||
log(BASIC, 'creating new entry <%s> for <%s>', dn, self) | |||
dn = safe_dn(dn) | |||
        for entry in self.entries:  # checks if the dn is already used in a cursor entry
if entry.entry_dn == dn: | |||
error_message = 'dn already present in cursor' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
rdns = safe_rdn(dn, decompose=True) | |||
entry = self.entry_class(dn, self) # defines a new empty Entry | |||
for attr in entry.entry_mandatory_attributes: # defines all mandatory attributes as virtual | |||
entry._state.attributes[attr] = self.attribute_class(entry._state.definition[attr], entry, self) | |||
entry.__dict__[attr] = entry._state.attributes[attr] | |||
entry.objectclass.set(self.definition._object_class) | |||
        for rdn in rdns:  # adds virtual attributes from the rdns in the entry name (there may be more than one with the + syntax)
if rdn[0] in entry._state.definition._attributes: | |||
rdn_name = entry._state.definition._attributes[rdn[0]].name # normalize case folding | |||
if rdn_name not in entry._state.attributes: | |||
entry._state.attributes[rdn_name] = self.attribute_class(entry._state.definition[rdn_name], entry, self) | |||
entry.__dict__[rdn_name] = entry._state.attributes[rdn_name] | |||
entry.__dict__[rdn_name].set(rdn[1]) | |||
else: | |||
error_message = 'rdn type \'%s\' not in object class definition' % rdn[0] | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
        entry._state.set_status(STATUS_VIRTUAL)  # set initial status
entry._state.set_status(STATUS_PENDING_CHANGES) # tries to change status to PENDING_CHANGES. If mandatory attributes are missing status is reverted to MANDATORY_MISSING | |||
self.entries.append(entry) | |||
return entry | |||
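    # Sketch of creating a brand new entry with this method (DN and attribute values
    # are examples only; 'conn' and 'person' are assumed to exist as in earlier examples):
    #
    #     w = Writer(conn, person)
    #     e = w.new('cn=jdoe,ou=people,dc=example,dc=com')
    #     e.sn = 'Doe'                 # mandatory attribute of inetOrgPerson
    #     e.entry_commit_changes()     # or w.commit() to commit the whole cursor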
def refresh_entry(self, entry, tries=4, seconds=2): | |||
conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX') | |||
self._do_not_reset = True | |||
attr_list = [] | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'refreshing entry <%s> for <%s>', entry, self) | |||
for attr in entry._state.attributes: # check friendly attribute name in AttrDef, do not check operational attributes | |||
if attr.lower().startswith(conf_operational_attribute_prefix.lower()): | |||
continue | |||
if entry._state.definition[attr].name: | |||
attr_list.append(entry._state.definition[attr].name) | |||
else: | |||
attr_list.append(entry._state.definition[attr].key) | |||
        temp_entry = self._refresh_object(entry.entry_dn, attr_list, tries, seconds=seconds)  # if any attribute is added it is added only to the entry, not to the definition
self._do_not_reset = False | |||
if temp_entry: | |||
temp_entry._state.origin = entry._state.origin | |||
entry.__dict__.clear() | |||
entry.__dict__['_state'] = temp_entry._state | |||
for attr in entry._state.attributes: # returns the attribute key | |||
entry.__dict__[attr] = entry._state.attributes[attr] | |||
for attr in entry.entry_attributes: # if any attribute of the class was deleted makes it virtual | |||
if attr not in entry._state.attributes and attr in entry.entry_definition._attributes: | |||
entry._state.attributes[attr] = WritableAttribute(entry.entry_definition[attr], entry, self) | |||
entry.__dict__[attr] = entry._state.attributes[attr] | |||
entry._state.set_status(entry._state._initial_status) | |||
return True | |||
return False |
@@ -0,0 +1,671 @@ | |||
""" | |||
""" | |||
# Created on 2016.08.19 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2016 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
import json | |||
try: | |||
from collections import OrderedDict | |||
except ImportError: | |||
from ..utils.ordDict import OrderedDict # for Python 2.6 | |||
from os import linesep | |||
from .. import STRING_TYPES, SEQUENCE_TYPES, MODIFY_ADD, MODIFY_REPLACE | |||
from .attribute import WritableAttribute | |||
from .objectDef import ObjectDef | |||
from .attrDef import AttrDef | |||
from ..core.exceptions import LDAPKeyError, LDAPCursorError | |||
from ..utils.conv import check_json_dict, format_json, prepare_for_stream | |||
from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header | |||
from ..utils.dn import safe_dn, safe_rdn, to_dn | |||
from ..utils.repr import to_stdout_encoding | |||
from ..utils.ciDict import CaseInsensitiveWithAliasDict | |||
from ..utils.config import get_config_parameter | |||
from . import STATUS_VIRTUAL, STATUS_WRITABLE, STATUS_PENDING_CHANGES, STATUS_COMMITTED, STATUS_DELETED,\ | |||
STATUS_INIT, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING, STATUS_MANDATORY_MISSING, STATUSES, INITIAL_STATUSES | |||
from ..core.results import RESULT_SUCCESS | |||
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED | |||
class EntryState(object): | |||
"""Contains data on the status of the entry. Does not pollute the Entry __dict__. | |||
""" | |||
def __init__(self, dn, cursor): | |||
self.dn = dn | |||
self._initial_status = None | |||
self._to = None # used for move and rename | |||
self.status = STATUS_INIT | |||
self.attributes = CaseInsensitiveWithAliasDict() | |||
self.raw_attributes = CaseInsensitiveWithAliasDict() | |||
self.response = None | |||
self.cursor = cursor | |||
        self.origin = None  # reference to the original read-only entry (set when made writable). Needed to update attributes in the read-only entry when modified (only if both refer to the same server)
self.read_time = None | |||
self.changes = OrderedDict() # includes changes to commit in a writable entry | |||
if cursor.definition: | |||
self.definition = cursor.definition | |||
else: | |||
self.definition = None | |||
def __repr__(self): | |||
if self.__dict__ and self.dn is not None: | |||
r = 'DN: ' + to_stdout_encoding(self.dn) + ' - STATUS: ' + ((self._initial_status + ', ') if self._initial_status != self.status else '') + self.status + ' - READ TIME: ' + (self.read_time.isoformat() if self.read_time else '<never>') + linesep | |||
r += 'attributes: ' + ', '.join(sorted(self.attributes.keys())) + linesep | |||
r += 'object def: ' + (', '.join(sorted(self.definition._object_class)) if self.definition._object_class else '<None>') + linesep | |||
r += 'attr defs: ' + ', '.join(sorted(self.definition._attributes.keys())) + linesep | |||
r += 'response: ' + ('present' if self.response else '<None>') + linesep | |||
r += 'cursor: ' + (self.cursor.__class__.__name__ if self.cursor else '<None>') + linesep | |||
return r | |||
else: | |||
return object.__repr__(self) | |||
def __str__(self): | |||
return self.__repr__() | |||
def set_status(self, status): | |||
conf_ignored_mandatory_attributes_in_object_def = [v.lower() for v in get_config_parameter('IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF')] | |||
if status not in STATUSES: | |||
error_message = 'invalid entry status ' + str(status) | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
if status in INITIAL_STATUSES: | |||
self._initial_status = status | |||
self.status = status | |||
if status == STATUS_DELETED: | |||
self._initial_status = STATUS_VIRTUAL | |||
if status == STATUS_COMMITTED: | |||
self._initial_status = STATUS_WRITABLE | |||
if self.status == STATUS_VIRTUAL or (self.status == STATUS_PENDING_CHANGES and self._initial_status == STATUS_VIRTUAL): # checks if all mandatory attributes are present in new entries | |||
for attr in self.definition._attributes: | |||
if self.definition._attributes[attr].mandatory and attr.lower() not in conf_ignored_mandatory_attributes_in_object_def: | |||
if (attr not in self.attributes or self.attributes[attr].virtual) and attr not in self.changes: | |||
self.status = STATUS_MANDATORY_MISSING | |||
break | |||
class EntryBase(object): | |||
"""The Entry object contains a single LDAP entry. | |||
    Attributes can be accessed as class attributes, as sequence items
    or as dictionary keys. Keys are not case sensitive.
    The Entry object is read only
    - The DN is in entry_dn
    - The cursor reference is in entry_cursor
    - Raw attribute values are retrieved with entry_raw_attributes and the entry_raw_attribute() method
""" | |||
def __init__(self, dn, cursor): | |||
self.__dict__['_state'] = EntryState(dn, cursor) | |||
def __repr__(self): | |||
if self.__dict__ and self.entry_dn is not None: | |||
r = 'DN: ' + to_stdout_encoding(self.entry_dn) + ' - STATUS: ' + ((self._state._initial_status + ', ') if self._state._initial_status != self.entry_status else '') + self.entry_status + ' - READ TIME: ' + (self.entry_read_time.isoformat() if self.entry_read_time else '<never>') + linesep | |||
if self._state.attributes: | |||
for attr in sorted(self._state.attributes): | |||
if self._state.attributes[attr] or (hasattr(self._state.attributes[attr], 'changes') and self._state.attributes[attr].changes): | |||
r += ' ' + repr(self._state.attributes[attr]) + linesep | |||
return r | |||
else: | |||
return object.__repr__(self) | |||
def __str__(self): | |||
return self.__repr__() | |||
def __iter__(self): | |||
for attribute in self._state.attributes: | |||
yield self._state.attributes[attribute] | |||
# raise StopIteration # deprecated in PEP 479 | |||
return | |||
def __contains__(self, item): | |||
try: | |||
self.__getitem__(item) | |||
return True | |||
except LDAPKeyError: | |||
return False | |||
def __getattr__(self, item): | |||
if isinstance(item, STRING_TYPES): | |||
if item == '_state': | |||
return self.__dict__['_state'] | |||
item = ''.join(item.split()).lower() | |||
attr_found = None | |||
for attr in self._state.attributes.keys(): | |||
if item == attr.lower(): | |||
attr_found = attr | |||
break | |||
if not attr_found: | |||
for attr in self._state.attributes.aliases(): | |||
if item == attr.lower(): | |||
attr_found = attr | |||
break | |||
if not attr_found: | |||
for attr in self._state.attributes.keys(): | |||
if item + ';binary' == attr.lower(): | |||
attr_found = attr | |||
break | |||
if not attr_found: | |||
for attr in self._state.attributes.aliases(): | |||
if item + ';binary' == attr.lower(): | |||
attr_found = attr | |||
break | |||
if not attr_found: | |||
for attr in self._state.attributes.keys(): | |||
if item + ';range' in attr.lower(): | |||
attr_found = attr | |||
break | |||
if not attr_found: | |||
for attr in self._state.attributes.aliases(): | |||
if item + ';range' in attr.lower(): | |||
attr_found = attr | |||
break | |||
if not attr_found: | |||
error_message = 'attribute \'%s\' not found' % item | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
return self._state.attributes[attr] | |||
error_message = 'attribute name must be a string' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
def __setattr__(self, item, value): | |||
if item in self._state.attributes: | |||
error_message = 'attribute \'%s\' is read only' % item | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
else: | |||
error_message = 'entry is read only, cannot add \'%s\'' % item | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
def __getitem__(self, item): | |||
if isinstance(item, STRING_TYPES): | |||
item = ''.join(item.split()).lower() | |||
attr_found = None | |||
for attr in self._state.attributes.keys(): | |||
if item == attr.lower(): | |||
attr_found = attr | |||
break | |||
if not attr_found: | |||
for attr in self._state.attributes.aliases(): | |||
if item == attr.lower(): | |||
attr_found = attr | |||
break | |||
if not attr_found: | |||
for attr in self._state.attributes.keys(): | |||
if item + ';binary' == attr.lower(): | |||
attr_found = attr | |||
break | |||
if not attr_found: | |||
for attr in self._state.attributes.aliases(): | |||
if item + ';binary' == attr.lower(): | |||
attr_found = attr | |||
break | |||
if not attr_found: | |||
error_message = 'key \'%s\' not found' % item | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPKeyError(error_message) | |||
return self._state.attributes[attr] | |||
error_message = 'key must be a string' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPKeyError(error_message) | |||
def __eq__(self, other): | |||
if isinstance(other, EntryBase): | |||
return self.entry_dn == other.entry_dn | |||
return False | |||
def __lt__(self, other): | |||
if isinstance(other, EntryBase): | |||
return self.entry_dn <= other.entry_dn | |||
return False | |||
@property | |||
def entry_dn(self): | |||
return self._state.dn | |||
@property | |||
def entry_cursor(self): | |||
return self._state.cursor | |||
@property | |||
def entry_status(self): | |||
return self._state.status | |||
@property | |||
def entry_definition(self): | |||
return self._state.definition | |||
@property | |||
def entry_raw_attributes(self): | |||
return self._state.entry_raw_attributes | |||
def entry_raw_attribute(self, name): | |||
""" | |||
:param name: name of the attribute | |||
:return: raw (unencoded) value of the attribute, None if attribute is not found | |||
""" | |||
return self._state.entry_raw_attributes[name] if name in self._state.entry_raw_attributes else None | |||
@property | |||
def entry_mandatory_attributes(self): | |||
return [attribute for attribute in self.entry_definition._attributes if self.entry_definition._attributes[attribute].mandatory] | |||
@property | |||
def entry_attributes(self): | |||
return list(self._state.attributes.keys()) | |||
@property | |||
def entry_attributes_as_dict(self): | |||
return dict((attribute_key, attribute_value.values) for (attribute_key, attribute_value) in self._state.attributes.items()) | |||
@property | |||
def entry_read_time(self): | |||
return self._state.read_time | |||
@property | |||
def _changes(self): | |||
return self._state.changes | |||
def entry_to_json(self, raw=False, indent=4, sort=True, stream=None, checked_attributes=True, include_empty=True): | |||
json_entry = dict() | |||
json_entry['dn'] = self.entry_dn | |||
if checked_attributes: | |||
if not include_empty: | |||
# needed for python 2.6 compatibility | |||
json_entry['attributes'] = dict((key, self.entry_attributes_as_dict[key]) for key in self.entry_attributes_as_dict if self.entry_attributes_as_dict[key]) | |||
else: | |||
json_entry['attributes'] = self.entry_attributes_as_dict | |||
if raw: | |||
if not include_empty: | |||
# needed for python 2.6 compatibility | |||
json_entry['raw'] = dict((key, self.entry_raw_attributes[key]) for key in self.entry_raw_attributes if self.entry_raw_attributes[key]) | |||
else: | |||
json_entry['raw'] = dict(self.entry_raw_attributes) | |||
if str is bytes: # Python 2 | |||
check_json_dict(json_entry) | |||
json_output = json.dumps(json_entry, | |||
ensure_ascii=True, | |||
sort_keys=sort, | |||
indent=indent, | |||
check_circular=True, | |||
default=format_json, | |||
separators=(',', ': ')) | |||
if stream: | |||
stream.write(json_output) | |||
return json_output | |||
def entry_to_ldif(self, all_base64=False, line_separator=None, sort_order=None, stream=None): | |||
ldif_lines = operation_to_ldif('searchResponse', [self._state.response], all_base64, sort_order=sort_order) | |||
ldif_lines = add_ldif_header(ldif_lines) | |||
line_separator = line_separator or linesep | |||
ldif_output = line_separator.join(ldif_lines) | |||
if stream: | |||
if stream.tell() == 0: | |||
header = add_ldif_header(['-'])[0] | |||
stream.write(prepare_for_stream(header + line_separator + line_separator)) | |||
stream.write(prepare_for_stream(ldif_output + line_separator + line_separator)) | |||
return ldif_output | |||
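# Serialization sketch (assumes an entry 'e' obtained from a Reader search; the file
# name is an example only):
#
#     as_json = e.entry_to_json()        # checked attribute values as JSON
#     as_ldif = e.entry_to_ldif()        # RFC 2849 LDIF representation
#     with open('entry.json', 'w') as f:
#         e.entry_to_json(stream=f)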
class Entry(EntryBase): | |||
"""The Entry object contains a single LDAP entry. | |||
    Attributes can be accessed as class attributes, as sequence items
    or as dictionary keys. Keys are not case sensitive.
    The Entry object is read only
    - The DN is in entry_dn
    - The Reader reference is in entry_cursor
    - Raw attribute values are retrieved with entry_raw_attributes and
      the entry_raw_attribute() method
""" | |||
def entry_writable(self, object_def=None, writer_cursor=None, attributes=None, custom_validator=None, auxiliary_class=None): | |||
if not self.entry_cursor.schema: | |||
error_message = 'schema must be available to make an entry writable' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
# returns a new WritableEntry and its Writer cursor | |||
if object_def is None: | |||
if self.entry_cursor.definition._object_class: | |||
object_def = self.entry_definition._object_class | |||
auxiliary_class = self.entry_definition._auxiliary_class + (auxiliary_class if isinstance(auxiliary_class, SEQUENCE_TYPES) else []) | |||
elif 'objectclass' in self: | |||
object_def = self.objectclass.values | |||
if not object_def: | |||
error_message = 'object class must be specified to make an entry writable' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
if not isinstance(object_def, ObjectDef): | |||
object_def = ObjectDef(object_def, self.entry_cursor.schema, custom_validator, auxiliary_class) | |||
if attributes: | |||
if isinstance(attributes, STRING_TYPES): | |||
attributes = [attributes] | |||
if isinstance(attributes, SEQUENCE_TYPES): | |||
for attribute in attributes: | |||
if attribute not in object_def._attributes: | |||
error_message = 'attribute \'%s\' not in schema for \'%s\'' % (attribute, object_def) | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
else: | |||
attributes = [] | |||
if not writer_cursor: | |||
from .cursor import Writer # local import to avoid circular reference in import at startup | |||
writable_cursor = Writer(self.entry_cursor.connection, object_def) | |||
else: | |||
writable_cursor = writer_cursor | |||
if attributes: # force reading of attributes | |||
writable_entry = writable_cursor._refresh_object(self.entry_dn, list(attributes) + self.entry_attributes) | |||
else: | |||
writable_entry = writable_cursor._create_entry(self._state.response) | |||
writable_cursor.entries.append(writable_entry) | |||
writable_entry._state.read_time = self.entry_read_time | |||
writable_entry._state.origin = self # reference to the original read-only entry | |||
# checks original entry for custom definitions in AttrDefs | |||
for attr in writable_entry._state.origin.entry_definition._attributes: | |||
original_attr = writable_entry._state.origin.entry_definition._attributes[attr] | |||
if attr != original_attr.name and attr not in writable_entry._state.attributes: | |||
old_attr_def = writable_entry.entry_definition._attributes[original_attr.name] | |||
new_attr_def = AttrDef(original_attr.name, | |||
key=attr, | |||
validate=original_attr.validate, | |||
pre_query=original_attr.pre_query, | |||
post_query=original_attr.post_query, | |||
default=original_attr.default, | |||
dereference_dn=original_attr.dereference_dn, | |||
description=original_attr.description, | |||
mandatory=old_attr_def.mandatory, # keeps value read from schema | |||
single_value=old_attr_def.single_value, # keeps value read from schema | |||
alias=original_attr.other_names) | |||
object_def = writable_entry.entry_definition | |||
object_def -= old_attr_def | |||
object_def += new_attr_def | |||
# updates attribute name in entry attributes | |||
new_attr = WritableAttribute(new_attr_def, writable_entry, writable_cursor) | |||
if original_attr.name in writable_entry._state.attributes: | |||
new_attr.other_names = writable_entry._state.attributes[original_attr.name].other_names | |||
new_attr.raw_values = writable_entry._state.attributes[original_attr.name].raw_values | |||
new_attr.values = writable_entry._state.attributes[original_attr.name].values | |||
new_attr.response = writable_entry._state.attributes[original_attr.name].response | |||
writable_entry._state.attributes[attr] = new_attr | |||
# writable_entry._state.attributes.set_alias(attr, new_attr.other_names) | |||
del writable_entry._state.attributes[original_attr.name] | |||
writable_entry._state.set_status(STATUS_WRITABLE) | |||
return writable_entry | |||
class WritableEntry(EntryBase): | |||
def __setitem__(self, key, value): | |||
if value is not Ellipsis: # hack for using implicit operators in writable attributes | |||
self.__setattr__(key, value) | |||
def __setattr__(self, item, value): | |||
conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')] | |||
if item == '_state' and isinstance(value, EntryState): | |||
self.__dict__['_state'] = value | |||
return | |||
if value is not Ellipsis: # hack for using implicit operators in writable attributes | |||
# checks if using an alias | |||
if item in self.entry_cursor.definition._attributes or item.lower() in conf_attributes_excluded_from_object_def: | |||
if item not in self._state.attributes: # setting value to an attribute still without values | |||
new_attribute = WritableAttribute(self.entry_cursor.definition._attributes[item], self, cursor=self.entry_cursor) | |||
self._state.attributes[str(item)] = new_attribute # force item to a string for key in attributes dict | |||
self._state.attributes[item].set(value) # try to add to new_values | |||
else: | |||
error_message = 'attribute \'%s\' not defined' % item | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
def __getattr__(self, item): | |||
if isinstance(item, STRING_TYPES): | |||
if item == '_state': | |||
return self.__dict__['_state'] | |||
item = ''.join(item.split()).lower() | |||
for attr in self._state.attributes.keys(): | |||
if item == attr.lower(): | |||
return self._state.attributes[attr] | |||
for attr in self._state.attributes.aliases(): | |||
if item == attr.lower(): | |||
return self._state.attributes[attr] | |||
            if item in self.entry_definition._attributes:  # item is a new attribute to commit; creates the AttrDef and adds it to the attributes to retrieve
self._state.attributes[item] = WritableAttribute(self.entry_definition._attributes[item], self, self.entry_cursor) | |||
self.entry_cursor.attributes.add(item) | |||
return self._state.attributes[item] | |||
error_message = 'attribute \'%s\' not defined' % item | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
else: | |||
error_message = 'attribute name must be a string' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
@property | |||
def entry_virtual_attributes(self): | |||
return [attr for attr in self.entry_attributes if self[attr].virtual] | |||
def entry_commit_changes(self, refresh=True, controls=None, clear_history=True): | |||
if clear_history: | |||
self.entry_cursor._reset_history() | |||
if self.entry_status == STATUS_READY_FOR_DELETION: | |||
result = self.entry_cursor.connection.delete(self.entry_dn, controls) | |||
if not self.entry_cursor.connection.strategy.sync: | |||
response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) | |||
else: | |||
response = self.entry_cursor.connection.response | |||
result = self.entry_cursor.connection.result | |||
request = self.entry_cursor.connection.request | |||
self.entry_cursor._store_operation_in_history(request, result, response) | |||
if result['result'] == RESULT_SUCCESS: | |||
dn = self.entry_dn | |||
if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # deletes original read-only Entry | |||
cursor = self._state.origin.entry_cursor | |||
self._state.origin.__dict__.clear() | |||
self._state.origin.__dict__['_state'] = EntryState(dn, cursor) | |||
self._state.origin._state.set_status(STATUS_DELETED) | |||
cursor = self.entry_cursor | |||
self.__dict__.clear() | |||
self._state = EntryState(dn, cursor) | |||
self._state.set_status(STATUS_DELETED) | |||
return True | |||
return False | |||
elif self.entry_status == STATUS_READY_FOR_MOVING: | |||
result = self.entry_cursor.connection.modify_dn(self.entry_dn, '+'.join(safe_rdn(self.entry_dn)), new_superior=self._state._to) | |||
if not self.entry_cursor.connection.strategy.sync: | |||
response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) | |||
else: | |||
response = self.entry_cursor.connection.response | |||
result = self.entry_cursor.connection.result | |||
request = self.entry_cursor.connection.request | |||
self.entry_cursor._store_operation_in_history(request, result, response) | |||
if result['result'] == RESULT_SUCCESS: | |||
self._state.dn = safe_dn('+'.join(safe_rdn(self.entry_dn)) + ',' + self._state._to) | |||
if refresh: | |||
if self.entry_refresh(): | |||
if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin | |||
self._state.origin._state.dn = self.entry_dn | |||
self._state.set_status(STATUS_COMMITTED) | |||
self._state._to = None | |||
return True | |||
return False | |||
elif self.entry_status == STATUS_READY_FOR_RENAMING: | |||
rdn = '+'.join(safe_rdn(self._state._to)) | |||
result = self.entry_cursor.connection.modify_dn(self.entry_dn, rdn) | |||
if not self.entry_cursor.connection.strategy.sync: | |||
response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) | |||
else: | |||
response = self.entry_cursor.connection.response | |||
result = self.entry_cursor.connection.result | |||
request = self.entry_cursor.connection.request | |||
self.entry_cursor._store_operation_in_history(request, result, response) | |||
if result['result'] == RESULT_SUCCESS: | |||
self._state.dn = rdn + ',' + ','.join(to_dn(self.entry_dn)[1:]) | |||
if refresh: | |||
if self.entry_refresh(): | |||
if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin | |||
self._state.origin._state.dn = self.entry_dn | |||
self._state.set_status(STATUS_COMMITTED) | |||
self._state._to = None | |||
return True | |||
return False | |||
elif self.entry_status in [STATUS_VIRTUAL, STATUS_MANDATORY_MISSING]: | |||
missing_attributes = [] | |||
for attr in self.entry_mandatory_attributes: | |||
if (attr not in self._state.attributes or self._state.attributes[attr].virtual) and attr not in self._changes: | |||
missing_attributes.append('\'' + attr + '\'') | |||
error_message = 'mandatory attributes %s missing in entry %s' % (', '.join(missing_attributes), self.entry_dn) | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
elif self.entry_status == STATUS_PENDING_CHANGES: | |||
if self._changes: | |||
if self.entry_definition._auxiliary_class: # checks if an attribute is from an auxiliary class and adds it to the objectClass attribute if not present | |||
for attr in self._changes: | |||
# checks schema to see if attribute is defined in one of the already present object classes | |||
attr_classes = self.entry_cursor.schema.attribute_types[attr].mandatory_in + self.entry_cursor.schema.attribute_types[attr].optional_in | |||
for object_class in self.objectclass: | |||
if object_class in attr_classes: | |||
break | |||
else: # executed only if the attribute class is not present in the objectClass attribute | |||
# checks if attribute is defined in one of the possible auxiliary classes | |||
for aux_class in self.entry_definition._auxiliary_class: | |||
if aux_class in attr_classes: | |||
if self._state._initial_status == STATUS_VIRTUAL: # entry is new, there must be a pending objectClass MODIFY_REPLACE | |||
self._changes['objectClass'][0][1].append(aux_class) | |||
else: | |||
self.objectclass += aux_class | |||
if self._state._initial_status == STATUS_VIRTUAL: | |||
new_attributes = dict() | |||
for attr in self._changes: | |||
new_attributes[attr] = self._changes[attr][0][1] | |||
result = self.entry_cursor.connection.add(self.entry_dn, None, new_attributes, controls) | |||
else: | |||
result = self.entry_cursor.connection.modify(self.entry_dn, self._changes, controls) | |||
if not self.entry_cursor.connection.strategy.sync: # asynchronous request | |||
response, result, request = self.entry_cursor.connection.get_response(result, get_request=True) | |||
else: | |||
response = self.entry_cursor.connection.response | |||
result = self.entry_cursor.connection.result | |||
request = self.entry_cursor.connection.request | |||
self.entry_cursor._store_operation_in_history(request, result, response) | |||
if result['result'] == RESULT_SUCCESS: | |||
if refresh: | |||
if self.entry_refresh(): | |||
if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # updates original read-only entry if present | |||
for attr in self: # adds AttrDefs from writable entry to origin entry definition if some is missing | |||
if attr.key in self.entry_definition._attributes and attr.key not in self._state.origin.entry_definition._attributes: | |||
self._state.origin.entry_cursor.definition.add_attribute(self.entry_cursor.definition._attributes[attr.key]) # adds AttrDef from writable entry to original entry if missing | |||
temp_entry = self._state.origin.entry_cursor._create_entry(self._state.response) | |||
self._state.origin.__dict__.clear() | |||
self._state.origin.__dict__['_state'] = temp_entry._state | |||
for attr in self: # returns the whole attribute object | |||
if not attr.virtual: | |||
self._state.origin.__dict__[attr.key] = self._state.origin._state.attributes[attr.key] | |||
self._state.origin._state.read_time = self.entry_read_time | |||
else: | |||
self.entry_discard_changes() # if not refreshed remove committed changes | |||
self._state.set_status(STATUS_COMMITTED) | |||
return True | |||
return False | |||
def entry_discard_changes(self): | |||
self._changes.clear() | |||
self._state.set_status(self._state._initial_status) | |||
def entry_delete(self): | |||
if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_DELETION]: | |||
error_message = 'cannot delete entry, invalid status: ' + self.entry_status | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
self._state.set_status(STATUS_READY_FOR_DELETION) | |||
def entry_refresh(self, tries=4, seconds=2): | |||
""" | |||
Refreshes the entry from the LDAP Server | |||
""" | |||
if self.entry_cursor.connection: | |||
if self.entry_cursor.refresh_entry(self, tries, seconds): | |||
return True | |||
return False | |||
def entry_move(self, destination_dn): | |||
if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_MOVING]: | |||
error_message = 'cannot move entry, invalid status: ' + self.entry_status | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
self._state._to = safe_dn(destination_dn) | |||
self._state.set_status(STATUS_READY_FOR_MOVING) | |||
def entry_rename(self, new_name): | |||
if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_RENAMING]: | |||
error_message = 'cannot rename entry, invalid status: ' + self.entry_status | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPCursorError(error_message) | |||
self._state._to = new_name | |||
self._state.set_status(STATUS_READY_FOR_RENAMING) | |||
@property | |||
def entry_changes(self): | |||
return self._changes |
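# --- usage sketch (illustrative, not part of the module above) ---
# Hedged example of driving the writable-entry API shown above from application code:
# read an entry through the abstraction layer, make it writable, stage a change and
# commit it. The host name, credentials, base DN and the 'cn: jdoe' query below are
# assumptions for illustration only.
from ldap3 import Server, Connection, ObjectDef, Reader, ALL

conn = Connection(Server('ldap.example.com', get_info=ALL),          # assumed host
                  'cn=admin,dc=example,dc=com', 'secret',             # assumed credentials
                  auto_bind=True)
person = ObjectDef('inetOrgPerson', conn)                             # AttrDefs built from the server schema
reader = Reader(conn, person, 'ou=people,dc=example,dc=com', 'cn: jdoe')  # assumed base DN, simplified query
reader.search()
writable = reader.entries[0].entry_writable()   # switch the read-only entry to a Writer cursor
writable.sn = 'Doe-Smith'                       # staged as a pending modification (see entry_changes)
if not writable.entry_commit_changes():         # sends the pending changes to the server
    writable.entry_discard_changes()            # on failure, drop the staged changes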
@@ -0,0 +1,270 @@ | |||
""" | |||
""" | |||
# Created on 2014.02.02 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from os import linesep | |||
from .attrDef import AttrDef | |||
from ..core.exceptions import LDAPKeyError, LDAPObjectError, LDAPAttributeError, LDAPSchemaError | |||
from .. import STRING_TYPES, SEQUENCE_TYPES, Server, Connection | |||
from ..protocol.rfc4512 import SchemaInfo, constant_to_class_kind | |||
from ..protocol.formatters.standard import find_attribute_validator | |||
from ..utils.ciDict import CaseInsensitiveWithAliasDict | |||
from ..utils.config import get_config_parameter | |||
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED | |||
class ObjectDef(object): | |||
"""Represent an object in the LDAP server. AttrDefs are stored in a dictionary; the key is the friendly name defined in AttrDef. | |||
    AttrDefs can be added and removed using the += and -= operators.
    ObjectDef can be accessed either as a sequence or as a dictionary. When accessed, the whole AttrDef instance is returned
""" | |||
def __init__(self, object_class=None, schema=None, custom_validator=None, auxiliary_class=None): | |||
if object_class is None: | |||
object_class = [] | |||
if not isinstance(object_class, SEQUENCE_TYPES): | |||
object_class = [object_class] | |||
if auxiliary_class is None: | |||
auxiliary_class = [] | |||
if not isinstance(auxiliary_class, SEQUENCE_TYPES): | |||
auxiliary_class = [auxiliary_class] | |||
self.__dict__['_attributes'] = CaseInsensitiveWithAliasDict() | |||
self.__dict__['_custom_validator'] = custom_validator | |||
self.__dict__['_oid_info'] = [] | |||
if isinstance(schema, Connection) and (schema._deferred_bind or schema._deferred_open): # probably a lazy connection, tries to bind | |||
schema._fire_deferred() | |||
if schema is not None: | |||
if isinstance(schema, Server): | |||
schema = schema.schema | |||
elif isinstance(schema, Connection): | |||
schema = schema.server.schema | |||
elif isinstance(schema, SchemaInfo): | |||
pass | |||
elif schema: | |||
error_message = 'unable to read schema' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPSchemaError(error_message) | |||
if schema is None: | |||
error_message = 'schema not present' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPSchemaError(error_message) | |||
self.__dict__['_schema'] = schema | |||
if self._schema: | |||
object_class = [schema.object_classes[name].name[0] for name in object_class] # uses object class names capitalized as in schema | |||
auxiliary_class = [schema.object_classes[name].name[0] for name in auxiliary_class] | |||
for object_name in object_class: | |||
if object_name: | |||
self._populate_attr_defs(object_name) | |||
for object_name in auxiliary_class: | |||
if object_name: | |||
self._populate_attr_defs(object_name) | |||
self.__dict__['_object_class'] = object_class | |||
self.__dict__['_auxiliary_class'] = auxiliary_class | |||
if log_enabled(BASIC): | |||
log(BASIC, 'instantiated ObjectDef: <%r>', self) | |||
def _populate_attr_defs(self, object_name): | |||
if object_name in self._schema.object_classes: | |||
object_schema = self._schema.object_classes[object_name] | |||
self.__dict__['_oid_info'].append(object_name + " (" + constant_to_class_kind(object_schema.kind) + ") " + str(object_schema.oid)) | |||
if object_schema.superior: | |||
for sup in object_schema.superior: | |||
self._populate_attr_defs(sup) | |||
for attribute_name in object_schema.must_contain: | |||
self.add_from_schema(attribute_name, True) | |||
for attribute_name in object_schema.may_contain: | |||
if attribute_name not in self._attributes: # the attribute could already be defined as "mandatory" in a superclass | |||
self.add_from_schema(attribute_name, False) | |||
else: | |||
error_message = 'object class \'%s\' not defined in schema' % object_name | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPObjectError(error_message) | |||
def __repr__(self): | |||
if self._object_class: | |||
r = 'OBJ : ' + ', '.join(self._object_class) + linesep | |||
else: | |||
r = 'OBJ : <None>' + linesep | |||
if self._auxiliary_class: | |||
r += 'AUX : ' + ', '.join(self._auxiliary_class) + linesep | |||
else: | |||
r += 'AUX : <None>' + linesep | |||
r += 'OID: ' + ', '.join([oid for oid in self._oid_info]) + linesep | |||
r += 'MUST: ' + ', '.join(sorted([attr for attr in self._attributes if self._attributes[attr].mandatory])) + linesep | |||
r += 'MAY : ' + ', '.join(sorted([attr for attr in self._attributes if not self._attributes[attr].mandatory])) + linesep | |||
return r | |||
def __str__(self): | |||
return self.__repr__() | |||
def __getitem__(self, item): | |||
return self.__getattr__(item) | |||
def __getattr__(self, item): | |||
item = ''.join(item.split()).lower() | |||
if '_attributes' in self.__dict__: | |||
try: | |||
return self._attributes[item] | |||
except KeyError: | |||
error_message = 'key \'%s\' not present' % item | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPKeyError(error_message) | |||
else: | |||
error_message = 'internal _attributes property not defined' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPKeyError(error_message) | |||
def __setattr__(self, key, value): | |||
error_message = 'object \'%s\' is read only' % key | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPObjectError(error_message) | |||
def __iadd__(self, other): | |||
self.add_attribute(other) | |||
return self | |||
def __isub__(self, other): | |||
if isinstance(other, AttrDef): | |||
self.remove_attribute(other.key) | |||
elif isinstance(other, STRING_TYPES): | |||
self.remove_attribute(other) | |||
return self | |||
def __iter__(self): | |||
for attribute in self._attributes: | |||
yield self._attributes[attribute] | |||
def __len__(self): | |||
return len(self._attributes) | |||
if str is not bytes: # Python 3 | |||
        def __bool__(self): # needed to make the objectDef appear as existing in "if cursor:" even if there are no entries
return True | |||
else: # Python 2 | |||
def __nonzero__(self): | |||
return True | |||
def __contains__(self, item): | |||
try: | |||
self.__getitem__(item) | |||
except KeyError: | |||
return False | |||
return True | |||
def add_from_schema(self, attribute_name, mandatory=False): | |||
attr_def = AttrDef(attribute_name) | |||
attr_def.validate = find_attribute_validator(self._schema, attribute_name, self._custom_validator) | |||
attr_def.mandatory = mandatory # in schema mandatory is specified in the object class, not in the attribute class | |||
if self._schema and self._schema.attribute_types and attribute_name in self._schema.attribute_types: | |||
attr_def.single_value = self._schema.attribute_types[attribute_name].single_value | |||
attr_def.oid_info = self._schema.attribute_types[attribute_name] | |||
self.add_attribute(attr_def) | |||
def add_attribute(self, definition=None): | |||
"""Add an AttrDef to the ObjectDef. Can be called with the += operator. | |||
        :param definition: the AttrDef object to add; can also be a string containing the name of the attribute to add, or a list of either
""" | |||
conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')] | |||
if isinstance(definition, STRING_TYPES): | |||
self.add_from_schema(definition) | |||
elif isinstance(definition, AttrDef): | |||
if definition.key.lower() not in conf_attributes_excluded_from_object_def: | |||
if definition.key not in self._attributes: | |||
self._attributes[definition.key] = definition | |||
if definition.name and definition.name != definition.key: | |||
self._attributes.set_alias(definition.key, definition.name) | |||
other_names = [name for name in definition.oid_info.name if definition.key.lower() != name.lower()] if definition.oid_info else None | |||
if other_names: | |||
self._attributes.set_alias(definition.key, other_names) | |||
if not definition.validate: | |||
validator = find_attribute_validator(self._schema, definition.key, self._custom_validator) | |||
self._attributes[definition.key].validate = validator | |||
elif isinstance(definition, SEQUENCE_TYPES): | |||
for element in definition: | |||
self.add_attribute(element) | |||
else: | |||
error_message = 'unable to add element to object definition' | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPObjectError(error_message) | |||
def remove_attribute(self, item): | |||
"""Remove an AttrDef from the ObjectDef. Can be called with the -= operator. | |||
        :param item: the AttrDef to remove; can also be a string containing the name of the attribute to remove
""" | |||
key = None | |||
if isinstance(item, STRING_TYPES): | |||
key = ''.join(item.split()).lower() | |||
elif isinstance(item, AttrDef): | |||
key = item.key.lower() | |||
if key: | |||
for attr in self._attributes: | |||
if key == attr.lower(): | |||
del self._attributes[attr] | |||
break | |||
else: | |||
error_message = 'key \'%s\' not present' % key | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPKeyError(error_message) | |||
else: | |||
error_message = 'key type must be str or AttrDef not ' + str(type(item)) | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', error_message, self) | |||
raise LDAPAttributeError(error_message) | |||
def clear_attributes(self): | |||
"""Empty the ObjectDef attribute list | |||
""" | |||
self.__dict__['object_class'] = None | |||
self.__dict__['auxiliary_class'] = None | |||
self.__dict__['_attributes'] = dict() |
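# --- usage sketch (illustrative, not part of the module above) ---
# Hedged example of the behaviour described in the ObjectDef docstring: attribute
# definitions are populated from the live schema and can be adjusted with the
# += / -= operators. Host name and anonymous bind are assumptions for illustration.
from ldap3 import Server, Connection, ObjectDef, ALL

conn = Connection(Server('ldap.example.com', get_info=ALL), auto_bind=True)  # assumed host, anonymous bind
person = ObjectDef('inetOrgPerson', conn)   # must_contain/may_contain of the class (and superclasses) become AttrDefs
person += 'createTimestamp'                 # __iadd__ -> add_attribute(), adding an operational attribute by name
person -= 'telephoneNumber'                 # __isub__ -> remove_attribute() (present for inetOrgPerson in standard schemas)
print(person.sn.mandatory)                  # AttrDefs are reachable as attributes or items (case insensitive)
print('givenName' in person)                # membership test goes through __contains__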
@@ -0,0 +1,597 @@ | |||
""" | |||
""" | |||
# Created on 2014.05.14 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from os import sep | |||
from .results import RESULT_OPERATIONS_ERROR, RESULT_PROTOCOL_ERROR, RESULT_TIME_LIMIT_EXCEEDED, RESULT_SIZE_LIMIT_EXCEEDED, \ | |||
RESULT_STRONGER_AUTH_REQUIRED, RESULT_REFERRAL, RESULT_ADMIN_LIMIT_EXCEEDED, RESULT_UNAVAILABLE_CRITICAL_EXTENSION, \ | |||
RESULT_AUTH_METHOD_NOT_SUPPORTED, RESULT_UNDEFINED_ATTRIBUTE_TYPE, RESULT_NO_SUCH_ATTRIBUTE, \ | |||
RESULT_SASL_BIND_IN_PROGRESS, RESULT_CONFIDENTIALITY_REQUIRED, RESULT_INAPPROPRIATE_MATCHING, \ | |||
RESULT_CONSTRAINT_VIOLATION, \ | |||
RESULT_ATTRIBUTE_OR_VALUE_EXISTS, RESULT_INVALID_ATTRIBUTE_SYNTAX, RESULT_NO_SUCH_OBJECT, RESULT_ALIAS_PROBLEM, \ | |||
RESULT_INVALID_DN_SYNTAX, RESULT_ALIAS_DEREFERENCING_PROBLEM, RESULT_INVALID_CREDENTIALS, RESULT_LOOP_DETECTED, \ | |||
RESULT_ENTRY_ALREADY_EXISTS, RESULT_LCUP_SECURITY_VIOLATION, RESULT_CANCELED, RESULT_E_SYNC_REFRESH_REQUIRED, \ | |||
RESULT_NO_SUCH_OPERATION, RESULT_LCUP_INVALID_DATA, RESULT_OBJECT_CLASS_MODS_PROHIBITED, RESULT_NAMING_VIOLATION, \ | |||
RESULT_INSUFFICIENT_ACCESS_RIGHTS, RESULT_OBJECT_CLASS_VIOLATION, RESULT_TOO_LATE, RESULT_CANNOT_CANCEL, \ | |||
RESULT_LCUP_UNSUPPORTED_SCHEME, RESULT_BUSY, RESULT_AFFECT_MULTIPLE_DSAS, RESULT_UNAVAILABLE, \ | |||
RESULT_NOT_ALLOWED_ON_NON_LEAF, \ | |||
RESULT_UNWILLING_TO_PERFORM, RESULT_OTHER, RESULT_LCUP_RELOAD_REQUIRED, RESULT_ASSERTION_FAILED, \ | |||
RESULT_AUTHORIZATION_DENIED, RESULT_LCUP_RESOURCES_EXHAUSTED, RESULT_NOT_ALLOWED_ON_RDN, \ | |||
RESULT_INAPPROPRIATE_AUTHENTICATION | |||
import socket | |||
# LDAPException hierarchy | |||
class LDAPException(Exception): | |||
pass | |||
class LDAPOperationResult(LDAPException): | |||
def __new__(cls, result=None, description=None, dn=None, message=None, response_type=None, response=None): | |||
if cls is LDAPOperationResult and result and result in exception_table: | |||
exc = super(LDAPOperationResult, exception_table[result]).__new__( | |||
exception_table[result]) # create an exception of the required result error | |||
exc.result = result | |||
exc.description = description | |||
exc.dn = dn | |||
exc.message = message | |||
exc.type = response_type | |||
exc.response = response | |||
else: | |||
exc = super(LDAPOperationResult, cls).__new__(cls) | |||
return exc | |||
def __init__(self, result=None, description=None, dn=None, message=None, response_type=None, response=None): | |||
self.result = result | |||
self.description = description | |||
self.dn = dn | |||
self.message = message | |||
self.type = response_type | |||
self.response = response | |||
def __str__(self): | |||
s = [self.__class__.__name__, | |||
str(self.result) if self.result else None, | |||
self.description if self.description else None, | |||
self.dn if self.dn else None, | |||
self.message if self.message else None, | |||
self.type if self.type else None, | |||
self.response if self.response else None] | |||
        return ' - '.join([str(item) for item in s if item is not None])  # filter out unset fields, not the whole list
def __repr__(self): | |||
return self.__str__() | |||
class LDAPOperationsErrorResult(LDAPOperationResult): | |||
pass | |||
class LDAPProtocolErrorResult(LDAPOperationResult): | |||
pass | |||
class LDAPTimeLimitExceededResult(LDAPOperationResult): | |||
pass | |||
class LDAPSizeLimitExceededResult(LDAPOperationResult): | |||
pass | |||
class LDAPAuthMethodNotSupportedResult(LDAPOperationResult): | |||
pass | |||
class LDAPStrongerAuthRequiredResult(LDAPOperationResult): | |||
pass | |||
class LDAPReferralResult(LDAPOperationResult): | |||
pass | |||
class LDAPAdminLimitExceededResult(LDAPOperationResult): | |||
pass | |||
class LDAPUnavailableCriticalExtensionResult(LDAPOperationResult): | |||
pass | |||
class LDAPConfidentialityRequiredResult(LDAPOperationResult): | |||
pass | |||
class LDAPSASLBindInProgressResult(LDAPOperationResult): | |||
pass | |||
class LDAPNoSuchAttributeResult(LDAPOperationResult): | |||
pass | |||
class LDAPUndefinedAttributeTypeResult(LDAPOperationResult): | |||
pass | |||
class LDAPInappropriateMatchingResult(LDAPOperationResult): | |||
pass | |||
class LDAPConstraintViolationResult(LDAPOperationResult): | |||
pass | |||
class LDAPAttributeOrValueExistsResult(LDAPOperationResult): | |||
pass | |||
class LDAPInvalidAttributeSyntaxResult(LDAPOperationResult): | |||
pass | |||
class LDAPNoSuchObjectResult(LDAPOperationResult): | |||
pass | |||
class LDAPAliasProblemResult(LDAPOperationResult): | |||
pass | |||
class LDAPInvalidDNSyntaxResult(LDAPOperationResult): | |||
pass | |||
class LDAPAliasDereferencingProblemResult(LDAPOperationResult): | |||
pass | |||
class LDAPInappropriateAuthenticationResult(LDAPOperationResult): | |||
pass | |||
class LDAPInvalidCredentialsResult(LDAPOperationResult): | |||
pass | |||
class LDAPInsufficientAccessRightsResult(LDAPOperationResult): | |||
pass | |||
class LDAPBusyResult(LDAPOperationResult): | |||
pass | |||
class LDAPUnavailableResult(LDAPOperationResult): | |||
pass | |||
class LDAPUnwillingToPerformResult(LDAPOperationResult): | |||
pass | |||
class LDAPLoopDetectedResult(LDAPOperationResult): | |||
pass | |||
class LDAPNamingViolationResult(LDAPOperationResult): | |||
pass | |||
class LDAPObjectClassViolationResult(LDAPOperationResult): | |||
pass | |||
class LDAPNotAllowedOnNotLeafResult(LDAPOperationResult): | |||
pass | |||
class LDAPNotAllowedOnRDNResult(LDAPOperationResult): | |||
pass | |||
class LDAPEntryAlreadyExistsResult(LDAPOperationResult): | |||
pass | |||
class LDAPObjectClassModsProhibitedResult(LDAPOperationResult): | |||
pass | |||
class LDAPAffectMultipleDSASResult(LDAPOperationResult): | |||
pass | |||
class LDAPOtherResult(LDAPOperationResult): | |||
pass | |||
class LDAPLCUPResourcesExhaustedResult(LDAPOperationResult): | |||
pass | |||
class LDAPLCUPSecurityViolationResult(LDAPOperationResult): | |||
pass | |||
class LDAPLCUPInvalidDataResult(LDAPOperationResult): | |||
pass | |||
class LDAPLCUPUnsupportedSchemeResult(LDAPOperationResult): | |||
pass | |||
class LDAPLCUPReloadRequiredResult(LDAPOperationResult): | |||
pass | |||
class LDAPCanceledResult(LDAPOperationResult): | |||
pass | |||
class LDAPNoSuchOperationResult(LDAPOperationResult): | |||
pass | |||
class LDAPTooLateResult(LDAPOperationResult): | |||
pass | |||
class LDAPCannotCancelResult(LDAPOperationResult): | |||
pass | |||
class LDAPAssertionFailedResult(LDAPOperationResult): | |||
pass | |||
class LDAPAuthorizationDeniedResult(LDAPOperationResult): | |||
pass | |||
class LDAPESyncRefreshRequiredResult(LDAPOperationResult): | |||
pass | |||
exception_table = {RESULT_OPERATIONS_ERROR: LDAPOperationsErrorResult, | |||
RESULT_PROTOCOL_ERROR: LDAPProtocolErrorResult, | |||
RESULT_TIME_LIMIT_EXCEEDED: LDAPTimeLimitExceededResult, | |||
RESULT_SIZE_LIMIT_EXCEEDED: LDAPSizeLimitExceededResult, | |||
RESULT_AUTH_METHOD_NOT_SUPPORTED: LDAPAuthMethodNotSupportedResult, | |||
RESULT_STRONGER_AUTH_REQUIRED: LDAPStrongerAuthRequiredResult, | |||
RESULT_REFERRAL: LDAPReferralResult, | |||
RESULT_ADMIN_LIMIT_EXCEEDED: LDAPAdminLimitExceededResult, | |||
RESULT_UNAVAILABLE_CRITICAL_EXTENSION: LDAPUnavailableCriticalExtensionResult, | |||
RESULT_CONFIDENTIALITY_REQUIRED: LDAPConfidentialityRequiredResult, | |||
RESULT_SASL_BIND_IN_PROGRESS: LDAPSASLBindInProgressResult, | |||
RESULT_NO_SUCH_ATTRIBUTE: LDAPNoSuchAttributeResult, | |||
RESULT_UNDEFINED_ATTRIBUTE_TYPE: LDAPUndefinedAttributeTypeResult, | |||
RESULT_INAPPROPRIATE_MATCHING: LDAPInappropriateMatchingResult, | |||
RESULT_CONSTRAINT_VIOLATION: LDAPConstraintViolationResult, | |||
RESULT_ATTRIBUTE_OR_VALUE_EXISTS: LDAPAttributeOrValueExistsResult, | |||
RESULT_INVALID_ATTRIBUTE_SYNTAX: LDAPInvalidAttributeSyntaxResult, | |||
RESULT_NO_SUCH_OBJECT: LDAPNoSuchObjectResult, | |||
RESULT_ALIAS_PROBLEM: LDAPAliasProblemResult, | |||
RESULT_INVALID_DN_SYNTAX: LDAPInvalidDNSyntaxResult, | |||
RESULT_ALIAS_DEREFERENCING_PROBLEM: LDAPAliasDereferencingProblemResult, | |||
RESULT_INAPPROPRIATE_AUTHENTICATION: LDAPInappropriateAuthenticationResult, | |||
RESULT_INVALID_CREDENTIALS: LDAPInvalidCredentialsResult, | |||
RESULT_INSUFFICIENT_ACCESS_RIGHTS: LDAPInsufficientAccessRightsResult, | |||
RESULT_BUSY: LDAPBusyResult, | |||
RESULT_UNAVAILABLE: LDAPUnavailableResult, | |||
RESULT_UNWILLING_TO_PERFORM: LDAPUnwillingToPerformResult, | |||
RESULT_LOOP_DETECTED: LDAPLoopDetectedResult, | |||
RESULT_NAMING_VIOLATION: LDAPNamingViolationResult, | |||
RESULT_OBJECT_CLASS_VIOLATION: LDAPObjectClassViolationResult, | |||
RESULT_NOT_ALLOWED_ON_NON_LEAF: LDAPNotAllowedOnNotLeafResult, | |||
RESULT_NOT_ALLOWED_ON_RDN: LDAPNotAllowedOnRDNResult, | |||
RESULT_ENTRY_ALREADY_EXISTS: LDAPEntryAlreadyExistsResult, | |||
RESULT_OBJECT_CLASS_MODS_PROHIBITED: LDAPObjectClassModsProhibitedResult, | |||
RESULT_AFFECT_MULTIPLE_DSAS: LDAPAffectMultipleDSASResult, | |||
RESULT_OTHER: LDAPOtherResult, | |||
RESULT_LCUP_RESOURCES_EXHAUSTED: LDAPLCUPResourcesExhaustedResult, | |||
RESULT_LCUP_SECURITY_VIOLATION: LDAPLCUPSecurityViolationResult, | |||
RESULT_LCUP_INVALID_DATA: LDAPLCUPInvalidDataResult, | |||
RESULT_LCUP_UNSUPPORTED_SCHEME: LDAPLCUPUnsupportedSchemeResult, | |||
RESULT_LCUP_RELOAD_REQUIRED: LDAPLCUPReloadRequiredResult, | |||
RESULT_CANCELED: LDAPCanceledResult, | |||
RESULT_NO_SUCH_OPERATION: LDAPNoSuchOperationResult, | |||
RESULT_TOO_LATE: LDAPTooLateResult, | |||
RESULT_CANNOT_CANCEL: LDAPCannotCancelResult, | |||
RESULT_ASSERTION_FAILED: LDAPAssertionFailedResult, | |||
RESULT_AUTHORIZATION_DENIED: LDAPAuthorizationDeniedResult, | |||
RESULT_E_SYNC_REFRESH_REQUIRED: LDAPESyncRefreshRequiredResult} | |||
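# Illustrative sketch (hedged): because of the __new__ override above, instantiating the
# generic LDAPOperationResult with a result code known to exception_table yields the
# specific subclass registered for that code. The DN below is an assumption for illustration.
_example = LDAPOperationResult(result=RESULT_NO_SUCH_OBJECT,
                               description='noSuchObject',
                               dn='cn=missing,dc=example,dc=com')
assert isinstance(_example, LDAPNoSuchObjectResult)  # dispatched through exception_table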
class LDAPExceptionError(LDAPException): | |||
pass | |||
# configuration exceptions | |||
class LDAPConfigurationError(LDAPExceptionError): | |||
pass | |||
class LDAPUnknownStrategyError(LDAPConfigurationError): | |||
pass | |||
class LDAPUnknownAuthenticationMethodError(LDAPConfigurationError): | |||
pass | |||
class LDAPSSLConfigurationError(LDAPConfigurationError): | |||
pass | |||
class LDAPDefinitionError(LDAPConfigurationError): | |||
pass | |||
class LDAPPackageUnavailableError(LDAPConfigurationError, ImportError): | |||
pass | |||
class LDAPConfigurationParameterError(LDAPConfigurationError): | |||
pass | |||
# abstract layer exceptions | |||
class LDAPKeyError(LDAPExceptionError, KeyError, AttributeError): | |||
pass | |||
class LDAPObjectError(LDAPExceptionError, ValueError): | |||
pass | |||
class LDAPAttributeError(LDAPExceptionError, ValueError, TypeError): | |||
pass | |||
class LDAPCursorError(LDAPExceptionError): | |||
pass | |||
class LDAPObjectDereferenceError(LDAPExceptionError): | |||
pass | |||
# security exceptions | |||
class LDAPSSLNotSupportedError(LDAPExceptionError, ImportError): | |||
pass | |||
class LDAPInvalidTlsSpecificationError(LDAPExceptionError): | |||
pass | |||
class LDAPInvalidHashAlgorithmError(LDAPExceptionError, ValueError): | |||
pass | |||
# connection exceptions | |||
class LDAPBindError(LDAPExceptionError): | |||
pass | |||
class LDAPInvalidServerError(LDAPExceptionError): | |||
pass | |||
class LDAPSASLMechanismNotSupportedError(LDAPExceptionError): | |||
pass | |||
class LDAPConnectionIsReadOnlyError(LDAPExceptionError): | |||
pass | |||
class LDAPChangeError(LDAPExceptionError, ValueError): | |||
pass | |||
class LDAPServerPoolError(LDAPExceptionError): | |||
pass | |||
class LDAPServerPoolExhaustedError(LDAPExceptionError): | |||
pass | |||
class LDAPInvalidPortError(LDAPExceptionError): | |||
pass | |||
class LDAPStartTLSError(LDAPExceptionError): | |||
pass | |||
class LDAPCertificateError(LDAPExceptionError): | |||
pass | |||
class LDAPUserNameNotAllowedError(LDAPExceptionError): | |||
pass | |||
class LDAPUserNameIsMandatoryError(LDAPExceptionError): | |||
pass | |||
class LDAPPasswordIsMandatoryError(LDAPExceptionError): | |||
pass | |||
class LDAPInvalidFilterError(LDAPExceptionError): | |||
pass | |||
class LDAPInvalidScopeError(LDAPExceptionError, ValueError): | |||
pass | |||
class LDAPInvalidDereferenceAliasesError(LDAPExceptionError, ValueError): | |||
pass | |||
class LDAPInvalidValueError(LDAPExceptionError, ValueError): | |||
pass | |||
class LDAPControlError(LDAPExceptionError, ValueError): | |||
pass | |||
class LDAPExtensionError(LDAPExceptionError, ValueError): | |||
pass | |||
class LDAPLDIFError(LDAPExceptionError): | |||
pass | |||
class LDAPSchemaError(LDAPExceptionError): | |||
pass | |||
class LDAPSASLPrepError(LDAPExceptionError): | |||
pass | |||
class LDAPSASLBindInProgressError(LDAPExceptionError): | |||
pass | |||
class LDAPMetricsError(LDAPExceptionError): | |||
pass | |||
class LDAPObjectClassError(LDAPExceptionError): | |||
pass | |||
class LDAPInvalidDnError(LDAPExceptionError): | |||
pass | |||
class LDAPResponseTimeoutError(LDAPExceptionError): | |||
pass | |||
class LDAPTransactionError(LDAPExceptionError): | |||
pass | |||
# communication exceptions | |||
class LDAPCommunicationError(LDAPExceptionError): | |||
pass | |||
class LDAPSocketOpenError(LDAPCommunicationError): | |||
pass | |||
class LDAPSocketCloseError(LDAPCommunicationError): | |||
pass | |||
class LDAPSocketReceiveError(LDAPCommunicationError, socket.error): | |||
pass | |||
class LDAPSocketSendError(LDAPCommunicationError, socket.error): | |||
pass | |||
class LDAPSessionTerminatedByServerError(LDAPCommunicationError): | |||
pass | |||
class LDAPUnknownResponseError(LDAPCommunicationError): | |||
pass | |||
class LDAPUnknownRequestError(LDAPCommunicationError): | |||
pass | |||
class LDAPReferralError(LDAPCommunicationError): | |||
pass | |||
# pooling exceptions | |||
class LDAPConnectionPoolNameIsMandatoryError(LDAPExceptionError): | |||
pass | |||
class LDAPConnectionPoolNotStartedError(LDAPExceptionError): | |||
pass | |||
# restartable strategy | |||
class LDAPMaximumRetriesError(LDAPExceptionError): | |||
def __str__(self): | |||
s = [] | |||
if self.args: | |||
if isinstance(self.args, tuple): | |||
if len(self.args) > 0: | |||
s.append('LDAPMaximumRetriesError: ' + str(self.args[0])) | |||
if len(self.args) > 1: | |||
s.append('Exception history:') | |||
prev_exc = '' | |||
for i, exc in enumerate(self.args[1]): # args[1] contains exception history | |||
if str(exc[1]) != prev_exc: | |||
s.append((str(i).rjust(5) + ' ' + str(exc[0]) + ': ' + str(exc[1]) + ' - ' + str(exc[2]))) | |||
prev_exc = str(exc[1]) | |||
if len(self.args) > 2: | |||
s.append('Maximum number of retries reached: ' + str(self.args[2])) | |||
else: | |||
s = [LDAPExceptionError.__str__(self)] | |||
return sep.join(s) | |||
# exception factories | |||
def communication_exception_factory(exc_to_raise, exc): | |||
""" | |||
    Generates a new exception class of the requested type (a subclass of LDAPCommunicationError) merged with the exception raised by the interpreter
""" | |||
if exc_to_raise.__name__ in [cls.__name__ for cls in LDAPCommunicationError.__subclasses__()]: | |||
return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict()) | |||
else: | |||
raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise)) | |||
def start_tls_exception_factory(exc_to_raise, exc): | |||
""" | |||
    Generates a new LDAPStartTLSError exception class merged with the exception raised by the interpreter
""" | |||
if exc_to_raise.__name__ == 'LDAPStartTLSError': | |||
return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict()) | |||
else: | |||
raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise)) |
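# --- usage sketch (illustrative, not part of the module above) ---
# Hedged example of what the factory above produces: a dynamically built class that is
# both the requested LDAPCommunicationError subclass and the concrete exception raised
# by the interpreter, so callers can catch it under either type.
import socket
from ldap3.core.exceptions import LDAPSocketOpenError, communication_exception_factory

low_level = socket.timeout('connection timed out')                    # pretend the interpreter raised this
merged_cls = communication_exception_factory(LDAPSocketOpenError, low_level)
merged_exc = merged_cls(str(low_level))
assert isinstance(merged_exc, LDAPSocketOpenError)
assert isinstance(merged_exc, socket.timeout)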
@@ -0,0 +1,306 @@ | |||
""" | |||
""" | |||
# Created on 2014.03.14 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from datetime import datetime, MINYEAR | |||
from os import linesep | |||
from random import randint | |||
from time import sleep | |||
from .. import FIRST, ROUND_ROBIN, RANDOM, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter | |||
from .exceptions import LDAPUnknownStrategyError, LDAPServerPoolError, LDAPServerPoolExhaustedError | |||
from .server import Server | |||
from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK | |||
POOLING_STRATEGIES = [FIRST, ROUND_ROBIN, RANDOM] | |||
class ServerPoolState(object): | |||
def __init__(self, server_pool): | |||
self.servers = [] # each element is a list: [server, last_checked_time, available] | |||
self.strategy = server_pool.strategy | |||
self.server_pool = server_pool | |||
self.last_used_server = 0 | |||
self.refresh() | |||
self.initialize_time = datetime.now() | |||
if log_enabled(BASIC): | |||
log(BASIC, 'instantiated ServerPoolState: <%r>', self) | |||
def __str__(self): | |||
s = 'servers: ' + linesep | |||
if self.servers: | |||
for server in self.servers: | |||
s += str(server[0]) + linesep | |||
else: | |||
s += 'None' + linesep | |||
s += 'Pool strategy: ' + str(self.strategy) + linesep | |||
s += ' - Last used server: ' + ('None' if self.last_used_server == -1 else str(self.servers[self.last_used_server][0])) | |||
return s | |||
def refresh(self): | |||
self.servers = [] | |||
for server in self.server_pool.servers: | |||
self.servers.append([server, datetime(MINYEAR, 1, 1), True]) # server, smallest date ever, supposed available | |||
self.last_used_server = randint(0, len(self.servers) - 1) | |||
def get_current_server(self): | |||
return self.servers[self.last_used_server][0] | |||
def get_server(self): | |||
if self.servers: | |||
if self.server_pool.strategy == FIRST: | |||
if self.server_pool.active: | |||
# returns the first active server | |||
self.last_used_server = self.find_active_server(starting=0) | |||
else: | |||
                    # always returns the first server - no pooling
self.last_used_server = 0 | |||
elif self.server_pool.strategy == ROUND_ROBIN: | |||
if self.server_pool.active: | |||
# returns the next active server in a circular range | |||
self.last_used_server = self.find_active_server(self.last_used_server + 1) | |||
else: | |||
# returns the next server in a circular range | |||
self.last_used_server = self.last_used_server + 1 if (self.last_used_server + 1) < len(self.servers) else 0 | |||
elif self.server_pool.strategy == RANDOM: | |||
if self.server_pool.active: | |||
self.last_used_server = self.find_active_random_server() | |||
else: | |||
# returns a random server in the pool | |||
self.last_used_server = randint(0, len(self.servers) - 1) | |||
else: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'unknown server pooling strategy <%s>', self.server_pool.strategy) | |||
raise LDAPUnknownStrategyError('unknown server pooling strategy') | |||
if log_enabled(BASIC): | |||
log(BASIC, 'server returned from Server Pool: <%s>', self.last_used_server) | |||
return self.servers[self.last_used_server][0] | |||
else: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'no servers in Server Pool <%s>', self) | |||
raise LDAPServerPoolError('no servers in server pool') | |||
def find_active_random_server(self): | |||
counter = self.server_pool.active # can be True for "forever" or the number of cycles to try | |||
while counter: | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'entering loop for finding active server in pool <%s>', self) | |||
temp_list = self.servers[:] # copy | |||
while temp_list: | |||
                # pops a random server from a temp list and checks its
                # availability; if not available, tries another one
server = temp_list.pop(randint(0, len(temp_list) - 1)) | |||
if not server[2]: # server is offline | |||
if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - server[1]).seconds < self.server_pool.exhaust: # keeps server offline | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'server <%s> excluded from checking because it is offline', server[0]) | |||
continue | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'server <%s> reinserted in pool', server[0]) | |||
server[1] = datetime.now() | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'checking server <%s> for availability', server[0]) | |||
if server[0].check_availability(): | |||
# returns a random active server in the pool | |||
server[2] = True | |||
return self.servers.index(server) | |||
else: | |||
server[2] = False | |||
if not isinstance(self.server_pool.active, bool): | |||
counter -= 1 | |||
if log_enabled(ERROR): | |||
log(ERROR, 'no random active server available in Server Pool <%s> after maximum number of tries', self) | |||
raise LDAPServerPoolExhaustedError('no random active server available in server pool after maximum number of tries') | |||
def find_active_server(self, starting): | |||
conf_pool_timeout = get_config_parameter('POOLING_LOOP_TIMEOUT') | |||
counter = self.server_pool.active # can be True for "forever" or the number of cycles to try | |||
if starting >= len(self.servers): | |||
starting = 0 | |||
while counter: | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'entering loop number <%s> for finding active server in pool <%s>', counter, self) | |||
index = -1 | |||
pool_size = len(self.servers) | |||
while index < pool_size - 1: | |||
index += 1 | |||
offset = index + starting if index + starting < pool_size else index + starting - pool_size | |||
if not self.servers[offset][2]: # server is offline | |||
if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - self.servers[offset][1]).seconds < self.server_pool.exhaust: # keeps server offline | |||
if log_enabled(NETWORK): | |||
if isinstance(self.server_pool.exhaust, bool): | |||
                                log(NETWORK, 'server <%s> excluded from checking because it is offline', self.servers[offset][0])
else: | |||
                                log(NETWORK, 'server <%s> excluded from checking because it is offline for %d more seconds', self.servers[offset][0], (self.server_pool.exhaust - (datetime.now() - self.servers[offset][1]).seconds))
continue | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'server <%s> reinserted in pool', self.servers[offset][0]) | |||
self.servers[offset][1] = datetime.now() | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'checking server <%s> for availability', self.servers[offset][0]) | |||
if self.servers[offset][0].check_availability(): | |||
self.servers[offset][2] = True | |||
return offset | |||
else: | |||
self.servers[offset][2] = False # sets server offline | |||
if not isinstance(self.server_pool.active, bool): | |||
counter -= 1 | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'waiting for %d seconds before retrying pool servers cycle', conf_pool_timeout) | |||
sleep(conf_pool_timeout) | |||
if log_enabled(ERROR): | |||
log(ERROR, 'no active server available in Server Pool <%s> after maximum number of tries', self) | |||
raise LDAPServerPoolExhaustedError('no active server available in server pool after maximum number of tries') | |||
def __len__(self): | |||
return len(self.servers) | |||
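# Illustrative helper (hedged, not part of the library API): the circular index arithmetic
# used by the ROUND_ROBIN branch of ServerPoolState.get_server() when the pool is not
# checking availability - the index simply wraps around at the end of the server list.
def _next_round_robin_index(last_used, pool_size):
    return last_used + 1 if (last_used + 1) < pool_size else 0

assert [_next_round_robin_index(i, 3) for i in range(3)] == [1, 2, 0]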
class ServerPool(object): | |||
def __init__(self, | |||
servers=None, | |||
pool_strategy=ROUND_ROBIN, | |||
active=True, | |||
exhaust=False): | |||
if pool_strategy not in POOLING_STRATEGIES: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'unknown pooling strategy <%s>', pool_strategy) | |||
raise LDAPUnknownStrategyError('unknown pooling strategy') | |||
if exhaust and not active: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'cannot instantiate pool with exhaust and not active') | |||
raise LDAPServerPoolError('pools can be exhausted only when checking for active servers') | |||
self.servers = [] | |||
self.pool_states = dict() | |||
self.active = active | |||
self.exhaust = exhaust | |||
if isinstance(servers, SEQUENCE_TYPES + (Server, )): | |||
self.add(servers) | |||
elif isinstance(servers, STRING_TYPES): | |||
self.add(Server(servers)) | |||
self.strategy = pool_strategy | |||
if log_enabled(BASIC): | |||
log(BASIC, 'instantiated ServerPool: <%r>', self) | |||
def __str__(self): | |||
s = 'servers: ' + linesep | |||
if self.servers: | |||
for server in self.servers: | |||
s += str(server) + linesep | |||
else: | |||
s += 'None' + linesep | |||
s += 'Pool strategy: ' + str(self.strategy) | |||
s += ' - ' + 'active: ' + (str(self.active) if self.active else 'False') | |||
s += ' - ' + 'exhaust pool: ' + (str(self.exhaust) if self.exhaust else 'False') | |||
return s | |||
def __repr__(self): | |||
r = 'ServerPool(servers=' | |||
if self.servers: | |||
r += '[' | |||
for server in self.servers: | |||
r += server.__repr__() + ', ' | |||
r = r[:-2] + ']' | |||
else: | |||
r += 'None' | |||
r += ', pool_strategy={0.strategy!r}'.format(self) | |||
r += ', active={0.active!r}'.format(self) | |||
r += ', exhaust={0.exhaust!r}'.format(self) | |||
r += ')' | |||
return r | |||
def __len__(self): | |||
return len(self.servers) | |||
def __getitem__(self, item): | |||
return self.servers[item] | |||
def __iter__(self): | |||
return self.servers.__iter__() | |||
def add(self, servers): | |||
if isinstance(servers, Server): | |||
if servers not in self.servers: | |||
self.servers.append(servers) | |||
elif isinstance(servers, STRING_TYPES): | |||
self.servers.append(Server(servers)) | |||
elif isinstance(servers, SEQUENCE_TYPES): | |||
for server in servers: | |||
if isinstance(server, Server): | |||
self.servers.append(server) | |||
elif isinstance(server, STRING_TYPES): | |||
self.servers.append(Server(server)) | |||
else: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'element must be a server in Server Pool <%s>', self) | |||
raise LDAPServerPoolError('server in ServerPool must be a Server') | |||
else: | |||
if log_enabled(ERROR): | |||
                log(ERROR, 'server must be a Server or a list of Servers when adding to Server Pool <%s>', self)
            raise LDAPServerPoolError('server must be a Server or a list of Servers')
for connection in self.pool_states: | |||
# notifies connections using this pool to refresh | |||
self.pool_states[connection].refresh() | |||
def remove(self, server): | |||
if server in self.servers: | |||
self.servers.remove(server) | |||
else: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'server %s to be removed not in Server Pool <%s>', server, self) | |||
raise LDAPServerPoolError('server not in server pool') | |||
for connection in self.pool_states: | |||
# notifies connections using this pool to refresh | |||
self.pool_states[connection].refresh() | |||
def initialize(self, connection): | |||
pool_state = ServerPoolState(self) | |||
# registers pool_state in ServerPool object | |||
self.pool_states[connection] = pool_state | |||
def get_server(self, connection): | |||
if connection in self.pool_states: | |||
return self.pool_states[connection].get_server() | |||
else: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self) | |||
raise LDAPServerPoolError('connection not in ServerPoolState') | |||
def get_current_server(self, connection): | |||
if connection in self.pool_states: | |||
return self.pool_states[connection].get_current_server() | |||
else: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self) | |||
raise LDAPServerPoolError('connection not in ServerPoolState') |
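# --- usage sketch (illustrative, not part of the module above) ---
# Hedged example of how a ServerPool is normally used: it is handed to a Connection,
# which registers itself through ServerPool.initialize() and then obtains candidate
# servers via ServerPool.get_server(). Host names and credentials are assumptions.
from ldap3 import Server, ServerPool, Connection, ROUND_ROBIN

pool = ServerPool([Server('ldap1.example.com'), Server('ldap2.example.com')],  # assumed hosts
                  ROUND_ROBIN,
                  active=True,    # keep cycling until an available server is found
                  exhaust=60)     # keep an unreachable server out of rotation for 60 seconds
conn = Connection(pool, 'cn=admin,dc=example,dc=com', 'secret', auto_bind=True)  # assumed credentials
print(pool.get_current_server(conn))  # the server actually chosen for this connection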
@@ -0,0 +1,137 @@ | |||
""" | |||
""" | |||
# Created on 2016.08.31 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
# result codes | |||
RESULT_SUCCESS = 0 | |||
RESULT_OPERATIONS_ERROR = 1 | |||
RESULT_PROTOCOL_ERROR = 2 | |||
RESULT_TIME_LIMIT_EXCEEDED = 3 | |||
RESULT_SIZE_LIMIT_EXCEEDED = 4 | |||
RESULT_COMPARE_FALSE = 5 | |||
RESULT_COMPARE_TRUE = 6 | |||
RESULT_AUTH_METHOD_NOT_SUPPORTED = 7 | |||
RESULT_STRONGER_AUTH_REQUIRED = 8 | |||
RESULT_RESERVED = 9 | |||
RESULT_REFERRAL = 10 | |||
RESULT_ADMIN_LIMIT_EXCEEDED = 11 | |||
RESULT_UNAVAILABLE_CRITICAL_EXTENSION = 12 | |||
RESULT_CONFIDENTIALITY_REQUIRED = 13 | |||
RESULT_SASL_BIND_IN_PROGRESS = 14 | |||
RESULT_NO_SUCH_ATTRIBUTE = 16 | |||
RESULT_UNDEFINED_ATTRIBUTE_TYPE = 17 | |||
RESULT_INAPPROPRIATE_MATCHING = 18 | |||
RESULT_CONSTRAINT_VIOLATION = 19 | |||
RESULT_ATTRIBUTE_OR_VALUE_EXISTS = 20 | |||
RESULT_INVALID_ATTRIBUTE_SYNTAX = 21 | |||
RESULT_NO_SUCH_OBJECT = 32 | |||
RESULT_ALIAS_PROBLEM = 33 | |||
RESULT_INVALID_DN_SYNTAX = 34 | |||
RESULT_ALIAS_DEREFERENCING_PROBLEM = 36 | |||
RESULT_INAPPROPRIATE_AUTHENTICATION = 48 | |||
RESULT_INVALID_CREDENTIALS = 49 | |||
RESULT_INSUFFICIENT_ACCESS_RIGHTS = 50 | |||
RESULT_BUSY = 51 | |||
RESULT_UNAVAILABLE = 52 | |||
RESULT_UNWILLING_TO_PERFORM = 53 | |||
RESULT_LOOP_DETECTED = 54 | |||
RESULT_NAMING_VIOLATION = 64 | |||
RESULT_OBJECT_CLASS_VIOLATION = 65 | |||
RESULT_NOT_ALLOWED_ON_NON_LEAF = 66 | |||
RESULT_NOT_ALLOWED_ON_RDN = 67 | |||
RESULT_ENTRY_ALREADY_EXISTS = 68 | |||
RESULT_OBJECT_CLASS_MODS_PROHIBITED = 69 | |||
RESULT_AFFECT_MULTIPLE_DSAS = 71 | |||
RESULT_OTHER = 80 | |||
RESULT_LCUP_RESOURCES_EXHAUSTED = 113 | |||
RESULT_LCUP_SECURITY_VIOLATION = 114 | |||
RESULT_LCUP_INVALID_DATA = 115 | |||
RESULT_LCUP_UNSUPPORTED_SCHEME = 116 | |||
RESULT_LCUP_RELOAD_REQUIRED = 117 | |||
RESULT_CANCELED = 118 | |||
RESULT_NO_SUCH_OPERATION = 119 | |||
RESULT_TOO_LATE = 120 | |||
RESULT_CANNOT_CANCEL = 121 | |||
RESULT_ASSERTION_FAILED = 122 | |||
RESULT_AUTHORIZATION_DENIED = 123 | |||
RESULT_E_SYNC_REFRESH_REQUIRED = 4096 | |||
RESULT_CODES = { | |||
RESULT_SUCCESS: 'success', | |||
RESULT_OPERATIONS_ERROR: 'operationsError', | |||
RESULT_PROTOCOL_ERROR: 'protocolError', | |||
RESULT_TIME_LIMIT_EXCEEDED: 'timeLimitExceeded', | |||
RESULT_SIZE_LIMIT_EXCEEDED: 'sizeLimitExceeded', | |||
RESULT_COMPARE_FALSE: 'compareFalse', | |||
RESULT_COMPARE_TRUE: 'compareTrue', | |||
RESULT_AUTH_METHOD_NOT_SUPPORTED: 'authMethodNotSupported', | |||
RESULT_RESERVED: 'reserved', | |||
RESULT_STRONGER_AUTH_REQUIRED: 'strongerAuthRequired', | |||
RESULT_REFERRAL: 'referral', | |||
RESULT_ADMIN_LIMIT_EXCEEDED: 'adminLimitExceeded', | |||
RESULT_UNAVAILABLE_CRITICAL_EXTENSION: 'unavailableCriticalExtension', | |||
RESULT_CONFIDENTIALITY_REQUIRED: 'confidentialityRequired', | |||
RESULT_SASL_BIND_IN_PROGRESS: 'saslBindInProgress', | |||
RESULT_NO_SUCH_ATTRIBUTE: 'noSuchAttribute', | |||
RESULT_UNDEFINED_ATTRIBUTE_TYPE: 'undefinedAttributeType', | |||
RESULT_INAPPROPRIATE_MATCHING: 'inappropriateMatching', | |||
RESULT_CONSTRAINT_VIOLATION: 'constraintViolation', | |||
RESULT_ATTRIBUTE_OR_VALUE_EXISTS: 'attributeOrValueExists', | |||
RESULT_INVALID_ATTRIBUTE_SYNTAX: 'invalidAttributeSyntax', | |||
RESULT_NO_SUCH_OBJECT: 'noSuchObject', | |||
RESULT_ALIAS_PROBLEM: 'aliasProblem', | |||
RESULT_INVALID_DN_SYNTAX: 'invalidDNSyntax', | |||
RESULT_ALIAS_DEREFERENCING_PROBLEM: 'aliasDereferencingProblem', | |||
RESULT_INAPPROPRIATE_AUTHENTICATION: 'inappropriateAuthentication', | |||
RESULT_INVALID_CREDENTIALS: 'invalidCredentials', | |||
RESULT_INSUFFICIENT_ACCESS_RIGHTS: 'insufficientAccessRights', | |||
RESULT_BUSY: 'busy', | |||
RESULT_UNAVAILABLE: 'unavailable', | |||
RESULT_UNWILLING_TO_PERFORM: 'unwillingToPerform', | |||
RESULT_LOOP_DETECTED: 'loopDetected', | |||
RESULT_NAMING_VIOLATION: 'namingViolation', | |||
RESULT_OBJECT_CLASS_VIOLATION: 'objectClassViolation', | |||
RESULT_NOT_ALLOWED_ON_NON_LEAF: 'notAllowedOnNonLeaf', | |||
RESULT_NOT_ALLOWED_ON_RDN: 'notAllowedOnRDN', | |||
RESULT_ENTRY_ALREADY_EXISTS: 'entryAlreadyExists', | |||
RESULT_OBJECT_CLASS_MODS_PROHIBITED: 'objectClassModsProhibited', | |||
RESULT_AFFECT_MULTIPLE_DSAS: 'affectMultipleDSAs', | |||
RESULT_OTHER: 'other', | |||
RESULT_LCUP_RESOURCES_EXHAUSTED: 'lcupResourcesExhausted', | |||
RESULT_LCUP_SECURITY_VIOLATION: 'lcupSecurityViolation', | |||
RESULT_LCUP_INVALID_DATA: 'lcupInvalidData', | |||
RESULT_LCUP_UNSUPPORTED_SCHEME: 'lcupUnsupportedScheme', | |||
RESULT_LCUP_RELOAD_REQUIRED: 'lcupReloadRequired', | |||
RESULT_CANCELED: 'canceled', | |||
RESULT_NO_SUCH_OPERATION: 'noSuchOperation', | |||
RESULT_TOO_LATE: 'tooLate', | |||
RESULT_CANNOT_CANCEL: 'cannotCancel', | |||
RESULT_ASSERTION_FAILED: 'assertionFailed', | |||
RESULT_AUTHORIZATION_DENIED: 'authorizationDenied', | |||
RESULT_E_SYNC_REFRESH_REQUIRED: 'e-syncRefreshRequired' | |||
} | |||
# result codes that do not raise an exception (in raise_exceptions connection mode)
DO_NOT_RAISE_EXCEPTIONS = [RESULT_SUCCESS, RESULT_COMPARE_FALSE, RESULT_COMPARE_TRUE, RESULT_REFERRAL, RESULT_SASL_BIND_IN_PROGRESS] |
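# --- usage sketch (illustrative, not part of the module above) ---
# Hedged example of how these constants are typically consumed: translating the numeric
# code from a connection result dict into its name and checking whether a connection
# opened with raise_exceptions=True would raise for it. The dict literal below mimics
# the usual shape of Connection.result and is an assumption for illustration.
from ldap3.core.results import RESULT_CODES, DO_NOT_RAISE_EXCEPTIONS, RESULT_NO_SUCH_OBJECT

operation_result = {'result': RESULT_NO_SUCH_OBJECT, 'description': 'noSuchObject'}
print(RESULT_CODES[operation_result['result']])               # -> 'noSuchObject'
print(operation_result['result'] in DO_NOT_RAISE_EXCEPTIONS)  # -> False, so it would raise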
@@ -0,0 +1,572 @@ | |||
""" | |||
""" | |||
# Created on 2014.05.31 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
import socket | |||
from threading import Lock | |||
from datetime import datetime, MINYEAR | |||
from .. import DSA, SCHEMA, ALL, BASE, get_config_parameter, OFFLINE_EDIR_8_8_8, OFFLINE_AD_2012_R2, OFFLINE_SLAPD_2_4, OFFLINE_DS389_1_3_3, SEQUENCE_TYPES, IP_SYSTEM_DEFAULT, IP_V4_ONLY, IP_V6_ONLY, IP_V4_PREFERRED, IP_V6_PREFERRED, STRING_TYPES | |||
from .exceptions import LDAPInvalidServerError, LDAPDefinitionError, LDAPInvalidPortError, LDAPInvalidTlsSpecificationError, LDAPSocketOpenError | |||
from ..protocol.formatters.standard import format_attribute_values | |||
from ..protocol.rfc4511 import LDAP_MAX_INT | |||
from ..protocol.rfc4512 import SchemaInfo, DsaInfo | |||
from .tls import Tls | |||
from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL | |||
from ..utils.conv import to_unicode | |||
try: | |||
from urllib.parse import unquote # Python 3 | |||
except ImportError: | |||
from urllib import unquote # Python 2 | |||
try: # try to discover if unix sockets are available for LDAP over IPC (ldapi:// scheme) | |||
# noinspection PyUnresolvedReferences | |||
from socket import AF_UNIX | |||
unix_socket_available = True | |||
except ImportError: | |||
unix_socket_available = False | |||
class Server(object): | |||
""" | |||
LDAP Server definition class | |||
    allowed_referral_hosts can be None (default) or a list of tuples of
    allowed server IP addresses or names to contact while redirecting a
    search to referrals.
    The second element of each tuple is a boolean indicating whether
    authentication to that server is allowed; if False, only anonymous
    bind will be used.
    Per RFC 4516, use [('*', False)] to allow any host with anonymous
    bind, or [('*', True)] to allow any host with the same authentication
    as this Server.
""" | |||
_message_counter = 0 | |||
_message_id_lock = Lock() # global lock for message_id shared by all Server objects | |||
def __init__(self, | |||
host, | |||
port=None, | |||
use_ssl=False, | |||
allowed_referral_hosts=None, | |||
get_info=SCHEMA, | |||
tls=None, | |||
formatter=None, | |||
connect_timeout=None, | |||
mode=IP_V6_PREFERRED, | |||
validator=None): | |||
self.ipc = False | |||
url_given = False | |||
host = host.strip() | |||
if host.lower().startswith('ldap://'): | |||
self.host = host[7:] | |||
use_ssl = False | |||
url_given = True | |||
elif host.lower().startswith('ldaps://'): | |||
self.host = host[8:] | |||
use_ssl = True | |||
url_given = True | |||
elif host.lower().startswith('ldapi://') and unix_socket_available: | |||
self.ipc = True | |||
use_ssl = False | |||
url_given = True | |||
elif host.lower().startswith('ldapi://') and not unix_socket_available: | |||
            raise LDAPSocketOpenError('LDAP over IPC not available - UNIX sockets not present')
else: | |||
self.host = host | |||
if self.ipc: | |||
if str is bytes: # Python 2 | |||
self.host = unquote(host[7:]).decode('utf-8') | |||
else: # Python 3 | |||
self.host = unquote(host[7:]) # encoding defaults to utf-8 in python3 | |||
self.port = None | |||
elif ':' in self.host and self.host.count(':') == 1: | |||
hostname, _, hostport = self.host.partition(':') | |||
try: | |||
port = int(hostport) or port | |||
except ValueError: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'port <%s> must be an integer', port) | |||
raise LDAPInvalidPortError('port must be an integer') | |||
self.host = hostname | |||
elif url_given and self.host.startswith('['): | |||
hostname, sep, hostport = self.host[1:].partition(']') | |||
if sep != ']' or not self._is_ipv6(hostname): | |||
if log_enabled(ERROR): | |||
log(ERROR, 'invalid IPv6 server address for <%s>', self.host) | |||
raise LDAPInvalidServerError() | |||
if len(hostport): | |||
if not hostport.startswith(':'): | |||
if log_enabled(ERROR): | |||
log(ERROR, 'invalid URL in server name for <%s>', self.host) | |||
raise LDAPInvalidServerError('invalid URL in server name') | |||
if not hostport[1:].isdecimal(): | |||
if log_enabled(ERROR): | |||
log(ERROR, 'port must be an integer for <%s>', self.host) | |||
raise LDAPInvalidPortError('port must be an integer') | |||
port = int(hostport[1:]) | |||
self.host = hostname | |||
elif not url_given and self._is_ipv6(self.host): | |||
pass | |||
elif self.host.count(':') > 1: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'invalid server address for <%s>', self.host) | |||
raise LDAPInvalidServerError() | |||
if not self.ipc: | |||
            self.host = self.host.rstrip('/')  # remove any trailing slash from the host name
if not use_ssl and not port: | |||
port = 389 | |||
elif use_ssl and not port: | |||
port = 636 | |||
if isinstance(port, int): | |||
            if 0 <= port <= 65535:  # accept the full valid TCP port range, inclusive
self.port = port | |||
else: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'port <%s> must be in range from 0 to 65535', port) | |||
                raise LDAPInvalidPortError('port must be in range from 0 to 65535')
else: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'port <%s> must be an integer', port) | |||
raise LDAPInvalidPortError('port must be an integer') | |||
if allowed_referral_hosts is None: # defaults to any server with authentication | |||
allowed_referral_hosts = [('*', True)] | |||
if isinstance(allowed_referral_hosts, SEQUENCE_TYPES): | |||
self.allowed_referral_hosts = [] | |||
for referral_host in allowed_referral_hosts: | |||
if isinstance(referral_host, tuple): | |||
if isinstance(referral_host[1], bool): | |||
self.allowed_referral_hosts.append(referral_host) | |||
elif isinstance(allowed_referral_hosts, tuple): | |||
if isinstance(allowed_referral_hosts[1], bool): | |||
self.allowed_referral_hosts = [allowed_referral_hosts] | |||
else: | |||
self.allowed_referral_hosts = [] | |||
self.ssl = True if use_ssl else False | |||
if tls and not isinstance(tls, Tls): | |||
if log_enabled(ERROR): | |||
log(ERROR, 'invalid tls specification: <%s>', tls) | |||
raise LDAPInvalidTlsSpecificationError('invalid Tls object') | |||
self.tls = Tls() if self.ssl and not tls else tls | |||
if not self.ipc: | |||
if self._is_ipv6(self.host): | |||
self.name = ('ldaps' if self.ssl else 'ldap') + '://[' + self.host + ']:' + str(self.port) | |||
else: | |||
self.name = ('ldaps' if self.ssl else 'ldap') + '://' + self.host + ':' + str(self.port) | |||
else: | |||
self.name = host | |||
self.get_info = get_info | |||
self._dsa_info = None | |||
self._schema_info = None | |||
self.dit_lock = Lock() | |||
self.custom_formatter = formatter | |||
self.custom_validator = validator | |||
self._address_info = [] # property self.address_info resolved at open time (or when check_availability is called) | |||
self._address_info_resolved_time = datetime(MINYEAR, 1, 1) # smallest date ever | |||
self.current_address = None | |||
self.connect_timeout = connect_timeout | |||
self.mode = mode | |||
self.get_info_from_server(None) # load offline schema if needed | |||
if log_enabled(BASIC): | |||
log(BASIC, 'instantiated Server: <%r>', self) | |||
@staticmethod | |||
def _is_ipv6(host): | |||
try: | |||
socket.inet_pton(socket.AF_INET6, host) | |||
except (socket.error, AttributeError, ValueError): | |||
return False | |||
return True | |||
def __str__(self): | |||
if self.host: | |||
s = self.name + (' - ssl' if self.ssl else ' - cleartext') + (' - unix socket' if self.ipc else '') | |||
else: | |||
s = object.__str__(self) | |||
return s | |||
def __repr__(self): | |||
r = 'Server(host={0.host!r}, port={0.port!r}, use_ssl={0.ssl!r}'.format(self) | |||
r += '' if not self.allowed_referral_hosts else ', allowed_referral_hosts={0.allowed_referral_hosts!r}'.format(self) | |||
r += '' if self.tls is None else ', tls={0.tls!r}'.format(self) | |||
r += '' if not self.get_info else ', get_info={0.get_info!r}'.format(self) | |||
r += '' if not self.connect_timeout else ', connect_timeout={0.connect_timeout!r}'.format(self) | |||
r += '' if not self.mode else ', mode={0.mode!r}'.format(self) | |||
r += ')' | |||
return r | |||
@property | |||
def address_info(self): | |||
conf_refresh_interval = get_config_parameter('ADDRESS_INFO_REFRESH_TIME') | |||
if not self._address_info or (datetime.now() - self._address_info_resolved_time).seconds > conf_refresh_interval: | |||
# converts addresses tuple to list and adds a 6th parameter for availability (None = not checked, True = available, False=not available) and a 7th parameter for the checking time | |||
addresses = None | |||
try: | |||
if self.ipc: | |||
addresses = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, None, self.host, None)] | |||
else: | |||
addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED) | |||
except (socket.gaierror, AttributeError): | |||
pass | |||
if not addresses: # if addresses not found or raised an exception (for example for bad flags) tries again without flags | |||
try: | |||
addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP) | |||
except socket.gaierror: | |||
pass | |||
if addresses: | |||
self._address_info = [list(address) + [None, None] for address in addresses] | |||
self._address_info_resolved_time = datetime.now() | |||
else: | |||
self._address_info = [] | |||
self._address_info_resolved_time = datetime(MINYEAR, 1, 1) # smallest date | |||
if log_enabled(BASIC): | |||
for address in self._address_info: | |||
log(BASIC, 'address for <%s> resolved as <%r>', self, address[:-2]) | |||
return self._address_info | |||
def update_availability(self, address, available): | |||
cont = 0 | |||
while cont < len(self._address_info): | |||
if self.address_info[cont] == address: | |||
self._address_info[cont][5] = True if available else False | |||
self._address_info[cont][6] = datetime.now() | |||
break | |||
cont += 1 | |||
def reset_availability(self): | |||
for address in self._address_info: | |||
address[5] = None | |||
address[6] = None | |||
def check_availability(self): | |||
""" | |||
        Tries to open, connect and close a socket to the specified address
        and port to check availability. The timeout in seconds is taken from CHECK_AVAILABILITY_TIMEOUT
        if not specified in the Server object
""" | |||
conf_availability_timeout = get_config_parameter('CHECK_AVAILABILITY_TIMEOUT') | |||
available = False | |||
self.reset_availability() | |||
for address in self.candidate_addresses(): | |||
available = True | |||
try: | |||
temp_socket = socket.socket(*address[:3]) | |||
if self.connect_timeout: | |||
temp_socket.settimeout(self.connect_timeout) | |||
else: | |||
temp_socket.settimeout(conf_availability_timeout) # set timeout for checking availability to default | |||
try: | |||
temp_socket.connect(address[4]) | |||
except socket.error: | |||
available = False | |||
finally: | |||
try: | |||
temp_socket.shutdown(socket.SHUT_RDWR) | |||
except socket.error: | |||
available = False | |||
finally: | |||
temp_socket.close() | |||
except socket.gaierror: | |||
available = False | |||
if available: | |||
if log_enabled(BASIC): | |||
log(BASIC, 'server <%s> available at <%r>', self, address) | |||
self.update_availability(address, True) | |||
break # if an available address is found exits immediately | |||
else: | |||
self.update_availability(address, False) | |||
if log_enabled(ERROR): | |||
log(ERROR, 'server <%s> not available at <%r>', self, address) | |||
return available | |||
@staticmethod | |||
def next_message_id(): | |||
""" | |||
        LDAP messageId is unique for all connections to the same server
""" | |||
with Server._message_id_lock: | |||
Server._message_counter += 1 | |||
if Server._message_counter >= LDAP_MAX_INT: | |||
Server._message_counter = 1 | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'new message id <%d> generated', Server._message_counter) | |||
return Server._message_counter | |||
def _get_dsa_info(self, connection): | |||
""" | |||
        Retrieve DSE operational attributes as per RFC 4512 (5.1).
""" | |||
if connection.strategy.no_real_dsa: # do not try for mock strategies | |||
return | |||
if not connection.strategy.pooled: # in pooled strategies get_dsa_info is performed by the worker threads | |||
result = connection.search(search_base='', | |||
search_filter='(objectClass=*)', | |||
search_scope=BASE, | |||
attributes=['altServer', # requests specific dsa info attributes | |||
'namingContexts', | |||
'supportedControl', | |||
'supportedExtension', | |||
'supportedFeatures', | |||
'supportedCapabilities', | |||
'supportedLdapVersion', | |||
'supportedSASLMechanisms', | |||
'vendorName', | |||
'vendorVersion', | |||
'subschemaSubentry', | |||
'*', | |||
'+'], # requests all remaining attributes (other), | |||
get_operational_attributes=True) | |||
with self.dit_lock: | |||
if isinstance(result, bool): # sync request | |||
self._dsa_info = DsaInfo(connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else self._dsa_info | |||
elif result: # asynchronous request, must check if attributes in response | |||
results, _ = connection.get_response(result) | |||
if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]: | |||
self._dsa_info = DsaInfo(results[0]['attributes'], results[0]['raw_attributes']) | |||
if log_enabled(BASIC): | |||
log(BASIC, 'DSA info read for <%s> via <%s>', self, connection) | |||
def _get_schema_info(self, connection, entry=''): | |||
""" | |||
Retrieve schema from subschemaSubentry DSE attribute, per RFC | |||
4512 (4.4 and 5.1); entry = '' means DSE. | |||
""" | |||
if connection.strategy.no_real_dsa: # do not try for mock strategies | |||
return | |||
schema_entry = None | |||
if self._dsa_info and entry == '': # subschemaSubentry already present in dsaInfo | |||
if isinstance(self._dsa_info.schema_entry, SEQUENCE_TYPES): | |||
schema_entry = self._dsa_info.schema_entry[0] if self._dsa_info.schema_entry else None | |||
else: | |||
schema_entry = self._dsa_info.schema_entry if self._dsa_info.schema_entry else None | |||
else: | |||
result = connection.search(entry, '(objectClass=*)', BASE, attributes=['subschemaSubentry'], get_operational_attributes=True) | |||
if isinstance(result, bool): # sync request | |||
if result and 'subschemaSubentry' in connection.response[0]['raw_attributes']: | |||
if len(connection.response[0]['raw_attributes']['subschemaSubentry']) > 0: | |||
schema_entry = connection.response[0]['raw_attributes']['subschemaSubentry'][0] | |||
else: # asynchronous request, must check if subschemaSubentry in attributes | |||
results, _ = connection.get_response(result) | |||
if len(results) == 1 and 'raw_attributes' in results[0] and 'subschemaSubentry' in results[0]['attributes']: | |||
if len(results[0]['raw_attributes']['subschemaSubentry']) > 0: | |||
schema_entry = results[0]['raw_attributes']['subschemaSubentry'][0] | |||
if schema_entry and not connection.strategy.pooled: # in pooled strategies get_schema_info is performed by the worker threads | |||
if isinstance(schema_entry, bytes) and str is not bytes: # Python 3 | |||
schema_entry = to_unicode(schema_entry, from_server=True) | |||
result = connection.search(schema_entry, | |||
search_filter='(objectClass=subschema)', | |||
search_scope=BASE, | |||
attributes=['objectClasses', # requests specific subschema attributes | |||
'attributeTypes', | |||
'ldapSyntaxes', | |||
'matchingRules', | |||
'matchingRuleUse', | |||
'dITContentRules', | |||
'dITStructureRules', | |||
'nameForms', | |||
'createTimestamp', | |||
'modifyTimestamp', | |||
'*'], # requests all remaining attributes (other) | |||
get_operational_attributes=True | |||
) | |||
with self.dit_lock: | |||
self._schema_info = None | |||
if result: | |||
if isinstance(result, bool): # sync request | |||
self._schema_info = SchemaInfo(schema_entry, connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else None | |||
else: # asynchronous request, must check if attributes in response | |||
results, result = connection.get_response(result) | |||
if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]: | |||
self._schema_info = SchemaInfo(schema_entry, results[0]['attributes'], results[0]['raw_attributes']) | |||
                    if self._schema_info and not self._schema_info.is_valid():  # flaky servers can return an empty schema; if so, set the schema to None
self._schema_info = None | |||
if self._schema_info: # if schema is valid tries to apply formatter to the "other" dict with raw values for schema and info | |||
for attribute in self._schema_info.other: | |||
self._schema_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._schema_info.raw[attribute], self.custom_formatter) | |||
if self._dsa_info: # try to apply formatter to the "other" dict with dsa info raw values | |||
for attribute in self._dsa_info.other: | |||
self._dsa_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._dsa_info.raw[attribute], self.custom_formatter) | |||
if log_enabled(BASIC): | |||
log(BASIC, 'schema read for <%s> via <%s>', self, connection) | |||
def get_info_from_server(self, connection): | |||
""" | |||
reads info from DSE and from subschema | |||
""" | |||
if connection and not connection.closed: | |||
if self.get_info in [DSA, ALL]: | |||
self._get_dsa_info(connection) | |||
if self.get_info in [SCHEMA, ALL]: | |||
self._get_schema_info(connection) | |||
elif self.get_info == OFFLINE_EDIR_8_8_8: | |||
from ..protocol.schemas.edir888 import edir_8_8_8_schema, edir_8_8_8_dsa_info | |||
self.attach_schema_info(SchemaInfo.from_json(edir_8_8_8_schema)) | |||
self.attach_dsa_info(DsaInfo.from_json(edir_8_8_8_dsa_info)) | |||
elif self.get_info == OFFLINE_AD_2012_R2: | |||
from ..protocol.schemas.ad2012R2 import ad_2012_r2_schema, ad_2012_r2_dsa_info | |||
self.attach_schema_info(SchemaInfo.from_json(ad_2012_r2_schema)) | |||
self.attach_dsa_info(DsaInfo.from_json(ad_2012_r2_dsa_info)) | |||
elif self.get_info == OFFLINE_SLAPD_2_4: | |||
from ..protocol.schemas.slapd24 import slapd_2_4_schema, slapd_2_4_dsa_info | |||
self.attach_schema_info(SchemaInfo.from_json(slapd_2_4_schema)) | |||
self.attach_dsa_info(DsaInfo.from_json(slapd_2_4_dsa_info)) | |||
elif self.get_info == OFFLINE_DS389_1_3_3: | |||
from ..protocol.schemas.ds389 import ds389_1_3_3_schema, ds389_1_3_3_dsa_info | |||
self.attach_schema_info(SchemaInfo.from_json(ds389_1_3_3_schema)) | |||
self.attach_dsa_info(DsaInfo.from_json(ds389_1_3_3_dsa_info)) | |||
def attach_dsa_info(self, dsa_info=None): | |||
if isinstance(dsa_info, DsaInfo): | |||
self._dsa_info = dsa_info | |||
if log_enabled(BASIC): | |||
log(BASIC, 'attached DSA info to Server <%s>', self) | |||
def attach_schema_info(self, dsa_schema=None): | |||
if isinstance(dsa_schema, SchemaInfo): | |||
self._schema_info = dsa_schema | |||
if log_enabled(BASIC): | |||
log(BASIC, 'attached schema info to Server <%s>', self) | |||
@property | |||
def info(self): | |||
return self._dsa_info | |||
@property | |||
def schema(self): | |||
return self._schema_info | |||
@staticmethod | |||
def from_definition(host, dsa_info, dsa_schema, port=None, use_ssl=False, formatter=None, validator=None): | |||
""" | |||
Define a dummy server with preloaded schema and info | |||
:param host: host name | |||
:param dsa_info: DsaInfo preloaded object or a json formatted string or a file name | |||
:param dsa_schema: SchemaInfo preloaded object or a json formatted string or a file name | |||
:param port: dummy port | |||
:param use_ssl: use_ssl | |||
        :param formatter: custom formatter
        :param validator: custom validator
        :return: Server object
""" | |||
if isinstance(host, SEQUENCE_TYPES): | |||
dummy = Server(host=host[0], port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL) # for ServerPool object | |||
else: | |||
dummy = Server(host=host, port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL) | |||
if isinstance(dsa_info, DsaInfo): | |||
dummy._dsa_info = dsa_info | |||
elif isinstance(dsa_info, STRING_TYPES): | |||
try: | |||
dummy._dsa_info = DsaInfo.from_json(dsa_info) # tries to use dsa_info as a json configuration string | |||
except Exception: | |||
dummy._dsa_info = DsaInfo.from_file(dsa_info) # tries to use dsa_info as a file name | |||
if not dummy.info: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'invalid DSA info for %s', host) | |||
raise LDAPDefinitionError('invalid dsa info') | |||
if isinstance(dsa_schema, SchemaInfo): | |||
dummy._schema_info = dsa_schema | |||
elif isinstance(dsa_schema, STRING_TYPES): | |||
try: | |||
dummy._schema_info = SchemaInfo.from_json(dsa_schema) | |||
except Exception: | |||
dummy._schema_info = SchemaInfo.from_file(dsa_schema) | |||
if not dummy.schema: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'invalid schema info for %s', host) | |||
raise LDAPDefinitionError('invalid schema info') | |||
if log_enabled(BASIC): | |||
log(BASIC, 'created server <%s> from definition', dummy) | |||
return dummy | |||
def candidate_addresses(self): | |||
conf_reset_availability_timeout = get_config_parameter('RESET_AVAILABILITY_TIMEOUT') | |||
if self.ipc: | |||
candidates = self.address_info | |||
if log_enabled(BASIC): | |||
log(BASIC, 'candidate address for <%s>: <%s> with mode UNIX_SOCKET', self, self.name) | |||
else: | |||
# checks reset availability timeout | |||
for address in self.address_info: | |||
if address[6] and ((datetime.now() - address[6]).seconds > conf_reset_availability_timeout): | |||
address[5] = None | |||
address[6] = None | |||
# selects server address based on server mode and availability (in address[5]) | |||
addresses = self.address_info[:] # copy to avoid refreshing while searching candidates | |||
candidates = [] | |||
if addresses: | |||
if self.mode == IP_SYSTEM_DEFAULT: | |||
candidates.append(addresses[0]) | |||
elif self.mode == IP_V4_ONLY: | |||
candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)] | |||
elif self.mode == IP_V6_ONLY: | |||
candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)] | |||
elif self.mode == IP_V4_PREFERRED: | |||
candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)] | |||
candidates += [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)] | |||
elif self.mode == IP_V6_PREFERRED: | |||
candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)] | |||
candidates += [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)] | |||
else: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'invalid server mode for <%s>', self) | |||
raise LDAPInvalidServerError('invalid server mode') | |||
if log_enabled(BASIC): | |||
for candidate in candidates: | |||
log(BASIC, 'obtained candidate address for <%s>: <%r> with mode %s', self, candidate[:-2], self.mode) | |||
return candidates |
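# Usage sketch for the Server class above (illustrative only; the hostname below is a placeholder assumption).
from ldap3 import Server, ALL, IP_V4_PREFERRED
demo_server = Server('ldap.example.com',            # placeholder host
                     port=636,
                     use_ssl=True,
                     get_info=ALL,                   # DSA info and schema are read after bind via get_info_from_server()
                     mode=IP_V4_PREFERRED,           # candidate_addresses() orders IPv4 addresses before IPv6
                     connect_timeout=5)
if demo_server.check_availability():                 # opens and closes a socket for each candidate address
    print(demo_server.address_info)                  # resolved addresses with availability flag and check time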
@@ -0,0 +1,56 @@ | |||
""" | |||
""" | |||
# Created on 2015.01.07 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2015 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from datetime import timedelta, tzinfo | |||
# from python standard library docs | |||
class OffsetTzInfo(tzinfo): | |||
"""Fixed offset in minutes east from UTC""" | |||
def __init__(self, offset, name): | |||
self.offset = offset | |||
self.name = name | |||
self._offset = timedelta(minutes=offset) | |||
def __str__(self): | |||
return self.name | |||
def __repr__(self): | |||
return 'OffsetTzInfo(offset={0.offset!r}, name={0.name!r})'.format(self) | |||
def utcoffset(self, dt): | |||
return self._offset | |||
def tzname(self, dt): | |||
return self.name | |||
# noinspection PyMethodMayBeStatic | |||
def dst(self, dt): | |||
return timedelta(0) | |||
def __getinitargs__(self): # for pickling/unpickling | |||
return self.offset, self.name |
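# Usage sketch for OffsetTzInfo (illustrative only): attaching a fixed-offset timezone to a datetime,
# e.g. when decoding LDAP GeneralizedTime values.
from datetime import datetime
cet = OffsetTzInfo(offset=60, name='CET')            # 60 minutes east of UTC
aware = datetime(2018, 1, 1, 12, 0, 0, tzinfo=cet)
print(aware.isoformat())                             # 2018-01-01T12:00:00+01:00
print(aware.utcoffset())                             # 1:00:00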
@@ -0,0 +1,326 @@ | |||
""" | |||
""" | |||
# Created on 2013.08.05 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2013 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from .exceptions import LDAPSSLNotSupportedError, LDAPSSLConfigurationError, LDAPStartTLSError, LDAPCertificateError, start_tls_exception_factory | |||
from .. import SEQUENCE_TYPES | |||
from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK | |||
try: | |||
# noinspection PyUnresolvedReferences | |||
import ssl | |||
except ImportError: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'SSL not supported in this Python interpreter') | |||
raise LDAPSSLNotSupportedError('SSL not supported in this Python interpreter') | |||
try: | |||
from ssl import match_hostname, CertificateError # backport for python2 missing ssl functionalities | |||
except ImportError: | |||
from ..utils.tls_backport import CertificateError | |||
from ..utils.tls_backport import match_hostname | |||
if log_enabled(BASIC): | |||
log(BASIC, 'using tls_backport') | |||
try: # try to use SSLContext | |||
# noinspection PyUnresolvedReferences | |||
from ssl import create_default_context, Purpose # defined in Python 3.4 and Python 2.7.9 | |||
use_ssl_context = True | |||
except ImportError: | |||
use_ssl_context = False | |||
if log_enabled(BASIC): | |||
log(BASIC, 'SSLContext unavailable') | |||
from os import path | |||
# noinspection PyProtectedMember | |||
class Tls(object): | |||
""" | |||
tls/ssl configuration for Server object | |||
    Starting from Python 2.7.9 and Python 3.4 the SSLContext object is used,
    which tries to read the CAs defined at system level
ca_certs_path and ca_certs_data are valid only when using SSLContext | |||
local_private_key_password is valid only when using SSLContext | |||
sni is the server name for Server Name Indication (when available) | |||
""" | |||
def __init__(self, | |||
local_private_key_file=None, | |||
local_certificate_file=None, | |||
validate=ssl.CERT_NONE, | |||
version=None, | |||
ca_certs_file=None, | |||
valid_names=None, | |||
ca_certs_path=None, | |||
ca_certs_data=None, | |||
local_private_key_password=None, | |||
ciphers=None, | |||
sni=None): | |||
if validate in [ssl.CERT_NONE, ssl.CERT_OPTIONAL, ssl.CERT_REQUIRED]: | |||
self.validate = validate | |||
elif validate: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'invalid validate parameter <%s>', validate) | |||
raise LDAPSSLConfigurationError('invalid validate parameter') | |||
if ca_certs_file and path.exists(ca_certs_file): | |||
self.ca_certs_file = ca_certs_file | |||
elif ca_certs_file: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'invalid CA public key file <%s>', ca_certs_file) | |||
raise LDAPSSLConfigurationError('invalid CA public key file') | |||
else: | |||
self.ca_certs_file = None | |||
if ca_certs_path and use_ssl_context and path.exists(ca_certs_path): | |||
self.ca_certs_path = ca_certs_path | |||
elif ca_certs_path and not use_ssl_context: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'cannot use CA public keys path, SSLContext not available') | |||
raise LDAPSSLNotSupportedError('cannot use CA public keys path, SSLContext not available') | |||
elif ca_certs_path: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'invalid CA public keys path <%s>', ca_certs_path) | |||
raise LDAPSSLConfigurationError('invalid CA public keys path') | |||
else: | |||
self.ca_certs_path = None | |||
if ca_certs_data and use_ssl_context: | |||
self.ca_certs_data = ca_certs_data | |||
elif ca_certs_data: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'cannot use CA data, SSLContext not available') | |||
raise LDAPSSLNotSupportedError('cannot use CA data, SSLContext not available') | |||
else: | |||
self.ca_certs_data = None | |||
if local_private_key_password and use_ssl_context: | |||
self.private_key_password = local_private_key_password | |||
elif local_private_key_password: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'cannot use local private key password, SSLContext not available') | |||
raise LDAPSSLNotSupportedError('cannot use local private key password, SSLContext is not available') | |||
else: | |||
self.private_key_password = None | |||
self.version = version | |||
self.private_key_file = local_private_key_file | |||
self.certificate_file = local_certificate_file | |||
self.valid_names = valid_names | |||
self.ciphers = ciphers | |||
self.sni = sni | |||
if log_enabled(BASIC): | |||
log(BASIC, 'instantiated Tls: <%r>' % self) | |||
def __str__(self): | |||
s = [ | |||
'protocol: ' + str(self.version), | |||
'client private key: ' + ('present ' if self.private_key_file else 'not present'), | |||
'client certificate: ' + ('present ' if self.certificate_file else 'not present'), | |||
'private key password: ' + ('present ' if self.private_key_password else 'not present'), | |||
'CA certificates file: ' + ('present ' if self.ca_certs_file else 'not present'), | |||
'CA certificates path: ' + ('present ' if self.ca_certs_path else 'not present'), | |||
'CA certificates data: ' + ('present ' if self.ca_certs_data else 'not present'), | |||
'verify mode: ' + str(self.validate), | |||
'valid names: ' + str(self.valid_names), | |||
'ciphers: ' + str(self.ciphers), | |||
'sni: ' + str(self.sni) | |||
] | |||
return ' - '.join(s) | |||
def __repr__(self): | |||
r = '' if self.private_key_file is None else ', local_private_key_file={0.private_key_file!r}'.format(self) | |||
r += '' if self.certificate_file is None else ', local_certificate_file={0.certificate_file!r}'.format(self) | |||
r += '' if self.validate is None else ', validate={0.validate!r}'.format(self) | |||
r += '' if self.version is None else ', version={0.version!r}'.format(self) | |||
r += '' if self.ca_certs_file is None else ', ca_certs_file={0.ca_certs_file!r}'.format(self) | |||
r += '' if self.ca_certs_path is None else ', ca_certs_path={0.ca_certs_path!r}'.format(self) | |||
r += '' if self.ca_certs_data is None else ', ca_certs_data={0.ca_certs_data!r}'.format(self) | |||
r += '' if self.ciphers is None else ', ciphers={0.ciphers!r}'.format(self) | |||
r += '' if self.sni is None else ', sni={0.sni!r}'.format(self) | |||
r = 'Tls(' + r[2:] + ')' | |||
return r | |||
def wrap_socket(self, connection, do_handshake=False): | |||
""" | |||
Adds TLS to the connection socket | |||
""" | |||
if use_ssl_context: | |||
if self.version is None: # uses the default ssl context for reasonable security | |||
ssl_context = create_default_context(purpose=Purpose.SERVER_AUTH, | |||
cafile=self.ca_certs_file, | |||
capath=self.ca_certs_path, | |||
cadata=self.ca_certs_data) | |||
            else:  # code from create_default_context in the Python standard library 3.5.1, creates an SSL context with the specified protocol version
ssl_context = ssl.SSLContext(self.version) | |||
if self.ca_certs_file or self.ca_certs_path or self.ca_certs_data: | |||
ssl_context.load_verify_locations(self.ca_certs_file, self.ca_certs_path, self.ca_certs_data) | |||
elif self.validate != ssl.CERT_NONE: | |||
ssl_context.load_default_certs(Purpose.SERVER_AUTH) | |||
if self.certificate_file: | |||
ssl_context.load_cert_chain(self.certificate_file, keyfile=self.private_key_file, password=self.private_key_password) | |||
ssl_context.check_hostname = False | |||
ssl_context.verify_mode = self.validate | |||
if self.ciphers: | |||
try: | |||
ssl_context.set_ciphers(self.ciphers) | |||
except ssl.SSLError: | |||
pass | |||
if self.sni: | |||
wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake, server_hostname=self.sni) | |||
else: | |||
wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake) | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'socket wrapped with SSL using SSLContext for <%s>', connection) | |||
else: | |||
if self.version is None and hasattr(ssl, 'PROTOCOL_SSLv23'): | |||
self.version = ssl.PROTOCOL_SSLv23 | |||
if self.ciphers: | |||
try: | |||
wrapped_socket = ssl.wrap_socket(connection.socket, | |||
keyfile=self.private_key_file, | |||
certfile=self.certificate_file, | |||
server_side=False, | |||
cert_reqs=self.validate, | |||
ssl_version=self.version, | |||
ca_certs=self.ca_certs_file, | |||
do_handshake_on_connect=do_handshake, | |||
ciphers=self.ciphers) | |||
except ssl.SSLError: | |||
raise | |||
                except TypeError:  # in Python 2.6 the ciphers argument is not present, fall back to self.ciphers=None
self.ciphers = None | |||
if not self.ciphers: | |||
wrapped_socket = ssl.wrap_socket(connection.socket, | |||
keyfile=self.private_key_file, | |||
certfile=self.certificate_file, | |||
server_side=False, | |||
cert_reqs=self.validate, | |||
ssl_version=self.version, | |||
ca_certs=self.ca_certs_file, | |||
do_handshake_on_connect=do_handshake) | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'socket wrapped with SSL for <%s>', connection) | |||
if do_handshake and (self.validate == ssl.CERT_REQUIRED or self.validate == ssl.CERT_OPTIONAL): | |||
check_hostname(wrapped_socket, connection.server.host, self.valid_names) | |||
connection.socket = wrapped_socket | |||
return | |||
def start_tls(self, connection): | |||
if connection.server.ssl: # ssl already established at server level | |||
return False | |||
if (connection.tls_started and not connection._executing_deferred) or connection.strategy._outstanding or connection.sasl_in_progress: | |||
# Per RFC 4513 (3.1.1) | |||
if log_enabled(ERROR): | |||
log(ERROR, "can't start tls because operations are in progress for <%s>", self) | |||
return False | |||
connection.starting_tls = True | |||
if log_enabled(BASIC): | |||
log(BASIC, 'starting tls for <%s>', connection) | |||
if not connection.strategy.sync: | |||
            connection._awaiting_for_async_start_tls = True  # some flaky servers (OpenLDAP) don't return the extended response name in the response
result = connection.extended('1.3.6.1.4.1.1466.20037') | |||
if not connection.strategy.sync: | |||
# asynchronous - _start_tls must be executed by the strategy | |||
response = connection.get_response(result) | |||
if response != (None, None): | |||
if log_enabled(BASIC): | |||
log(BASIC, 'tls started for <%s>', connection) | |||
return True | |||
else: | |||
if log_enabled(BASIC): | |||
log(BASIC, 'tls not started for <%s>', connection) | |||
return False | |||
else: | |||
if connection.result['description'] not in ['success']: | |||
# startTLS failed | |||
connection.last_error = 'startTLS failed - ' + str(connection.result['description']) | |||
if log_enabled(ERROR): | |||
log(ERROR, '%s for <%s>', connection.last_error, connection) | |||
raise LDAPStartTLSError(connection.last_error) | |||
if log_enabled(BASIC): | |||
log(BASIC, 'tls started for <%s>', connection) | |||
return self._start_tls(connection) | |||
def _start_tls(self, connection): | |||
exc = None | |||
try: | |||
self.wrap_socket(connection, do_handshake=True) | |||
except Exception as e: | |||
connection.last_error = 'wrap socket error: ' + str(e) | |||
exc = e | |||
connection.starting_tls = False | |||
if exc: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'error <%s> wrapping socket for TLS in <%s>', connection.last_error, connection) | |||
raise start_tls_exception_factory(LDAPStartTLSError, exc)(connection.last_error) | |||
if connection.usage: | |||
connection._usage.wrapped_sockets += 1 | |||
connection.tls_started = True | |||
return True | |||
def check_hostname(sock, server_name, additional_names): | |||
server_certificate = sock.getpeercert() | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'certificate found for %s: %s', sock, server_certificate) | |||
if additional_names: | |||
host_names = [server_name] + (additional_names if isinstance(additional_names, SEQUENCE_TYPES) else [additional_names]) | |||
else: | |||
host_names = [server_name] | |||
for host_name in host_names: | |||
if not host_name: | |||
continue | |||
elif host_name == '*': | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'certificate matches * wildcard') | |||
return # valid | |||
try: | |||
match_hostname(server_certificate, host_name) # raise CertificateError if certificate doesn't match server name | |||
if log_enabled(NETWORK): | |||
log(NETWORK, 'certificate matches host name <%s>', host_name) | |||
return # valid | |||
except CertificateError as e: | |||
if log_enabled(NETWORK): | |||
log(NETWORK, str(e)) | |||
if log_enabled(ERROR): | |||
log(ERROR, "hostname doesn't match certificate") | |||
raise LDAPCertificateError("certificate %s doesn't match any name in %s " % (server_certificate, str(host_names))) |
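# Configuration sketch for the Tls class above (illustrative only; hostname, DN, password and CA file
# path are placeholder assumptions).
import ssl
from ldap3 import Server, Connection, Tls
tls_configuration = Tls(validate=ssl.CERT_REQUIRED,                    # verify the server certificate
                        version=ssl.PROTOCOL_TLSv1_2,
                        ca_certs_file='/etc/ssl/certs/example-ca.pem')  # placeholder CA bundle
server = Server('ldap.example.com', port=389, tls=tls_configuration)
conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret')
conn.open()
conn.start_tls()    # drives Tls.start_tls() -> wrap_socket(do_handshake=True) -> check_hostname()
conn.bind()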
@@ -0,0 +1,229 @@ | |||
""" | |||
""" | |||
# Created on 2014.03.15 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2013 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from datetime import datetime, timedelta | |||
from os import linesep | |||
from .exceptions import LDAPMetricsError | |||
from ..utils.log import log, log_enabled, ERROR, BASIC | |||
class ConnectionUsage(object): | |||
""" | |||
Collect statistics on connection usage | |||
""" | |||
def reset(self): | |||
self.open_sockets = 0 | |||
self.closed_sockets = 0 | |||
self.wrapped_sockets = 0 | |||
self.bytes_transmitted = 0 | |||
self.bytes_received = 0 | |||
self.messages_transmitted = 0 | |||
self.messages_received = 0 | |||
self.operations = 0 | |||
self.abandon_operations = 0 | |||
self.add_operations = 0 | |||
self.bind_operations = 0 | |||
self.compare_operations = 0 | |||
self.delete_operations = 0 | |||
self.extended_operations = 0 | |||
self.modify_operations = 0 | |||
self.modify_dn_operations = 0 | |||
self.search_operations = 0 | |||
self.unbind_operations = 0 | |||
self.referrals_received = 0 | |||
self.referrals_followed = 0 | |||
self.referrals_connections = 0 | |||
self.restartable_failures = 0 | |||
self.restartable_successes = 0 | |||
self.servers_from_pool = 0 | |||
if log_enabled(BASIC): | |||
log(BASIC, 'reset usage metrics') | |||
def __init__(self): | |||
self.initial_connection_start_time = None | |||
self.open_socket_start_time = None | |||
self.connection_stop_time = None | |||
self.last_transmitted_time = None | |||
self.last_received_time = None | |||
self.open_sockets = 0 | |||
self.closed_sockets = 0 | |||
self.wrapped_sockets = 0 | |||
self.bytes_transmitted = 0 | |||
self.bytes_received = 0 | |||
self.messages_transmitted = 0 | |||
self.messages_received = 0 | |||
self.operations = 0 | |||
self.abandon_operations = 0 | |||
self.add_operations = 0 | |||
self.bind_operations = 0 | |||
self.compare_operations = 0 | |||
self.delete_operations = 0 | |||
self.extended_operations = 0 | |||
self.modify_operations = 0 | |||
self.modify_dn_operations = 0 | |||
self.search_operations = 0 | |||
self.unbind_operations = 0 | |||
self.referrals_received = 0 | |||
self.referrals_followed = 0 | |||
self.referrals_connections = 0 | |||
self.restartable_failures = 0 | |||
self.restartable_successes = 0 | |||
self.servers_from_pool = 0 | |||
if log_enabled(BASIC): | |||
log(BASIC, 'instantiated Usage object') | |||
def __repr__(self): | |||
r = 'Connection Usage:' + linesep | |||
r += ' Time: [elapsed: ' + str(self.elapsed_time) + ']' + linesep | |||
r += ' Initial start time: ' + (str(self.initial_connection_start_time.isoformat()) if self.initial_connection_start_time else '') + linesep | |||
r += ' Open socket time: ' + (str(self.open_socket_start_time.isoformat()) if self.open_socket_start_time else '') + linesep | |||
r += ' Last transmitted time: ' + (str(self.last_transmitted_time.isoformat()) if self.last_transmitted_time else '') + linesep | |||
r += ' Last received time: ' + (str(self.last_received_time.isoformat()) if self.last_received_time else '') + linesep | |||
r += ' Close socket time: ' + (str(self.connection_stop_time.isoformat()) if self.connection_stop_time else '') + linesep | |||
r += ' Server:' + linesep | |||
r += ' Servers from pool: ' + str(self.servers_from_pool) + linesep | |||
r += ' Sockets open: ' + str(self.open_sockets) + linesep | |||
r += ' Sockets closed: ' + str(self.closed_sockets) + linesep | |||
r += ' Sockets wrapped: ' + str(self.wrapped_sockets) + linesep | |||
r += ' Bytes: ' + str(self.bytes_transmitted + self.bytes_received) + linesep | |||
r += ' Transmitted: ' + str(self.bytes_transmitted) + linesep | |||
r += ' Received: ' + str(self.bytes_received) + linesep | |||
r += ' Messages: ' + str(self.messages_transmitted + self.messages_received) + linesep | |||
r += ' Transmitted: ' + str(self.messages_transmitted) + linesep | |||
r += ' Received: ' + str(self.messages_received) + linesep | |||
r += ' Operations: ' + str(self.operations) + linesep | |||
r += ' Abandon: ' + str(self.abandon_operations) + linesep | |||
r += ' Bind: ' + str(self.bind_operations) + linesep | |||
r += ' Add: ' + str(self.add_operations) + linesep | |||
r += ' Compare: ' + str(self.compare_operations) + linesep | |||
r += ' Delete: ' + str(self.delete_operations) + linesep | |||
r += ' Extended: ' + str(self.extended_operations) + linesep | |||
r += ' Modify: ' + str(self.modify_operations) + linesep | |||
r += ' ModifyDn: ' + str(self.modify_dn_operations) + linesep | |||
r += ' Search: ' + str(self.search_operations) + linesep | |||
r += ' Unbind: ' + str(self.unbind_operations) + linesep | |||
r += ' Referrals: ' + linesep | |||
r += ' Received: ' + str(self.referrals_received) + linesep | |||
r += ' Followed: ' + str(self.referrals_followed) + linesep | |||
r += ' Connections: ' + str(self.referrals_connections) + linesep | |||
r += ' Restartable tries: ' + str(self.restartable_failures + self.restartable_successes) + linesep | |||
r += ' Failed restarts: ' + str(self.restartable_failures) + linesep | |||
r += ' Successful restarts: ' + str(self.restartable_successes) + linesep | |||
return r | |||
def __str__(self): | |||
return self.__repr__() | |||
def __iadd__(self, other): | |||
if not isinstance(other, ConnectionUsage): | |||
raise LDAPMetricsError('unable to add to ConnectionUsage') | |||
self.open_sockets += other.open_sockets | |||
self.closed_sockets += other.closed_sockets | |||
self.wrapped_sockets += other.wrapped_sockets | |||
self.bytes_transmitted += other.bytes_transmitted | |||
self.bytes_received += other.bytes_received | |||
self.messages_transmitted += other.messages_transmitted | |||
self.messages_received += other.messages_received | |||
self.operations += other.operations | |||
self.abandon_operations += other.abandon_operations | |||
self.add_operations += other.add_operations | |||
self.bind_operations += other.bind_operations | |||
self.compare_operations += other.compare_operations | |||
self.delete_operations += other.delete_operations | |||
self.extended_operations += other.extended_operations | |||
self.modify_operations += other.modify_operations | |||
self.modify_dn_operations += other.modify_dn_operations | |||
self.search_operations += other.search_operations | |||
self.unbind_operations += other.unbind_operations | |||
self.referrals_received += other.referrals_received | |||
self.referrals_followed += other.referrals_followed | |||
self.referrals_connections += other.referrals_connections | |||
self.restartable_failures += other.restartable_failures | |||
self.restartable_successes += other.restartable_successes | |||
self.servers_from_pool += other.servers_from_pool | |||
return self | |||
def update_transmitted_message(self, message, length): | |||
self.last_transmitted_time = datetime.now() | |||
self.bytes_transmitted += length | |||
self.operations += 1 | |||
self.messages_transmitted += 1 | |||
if message['type'] == 'abandonRequest': | |||
self.abandon_operations += 1 | |||
elif message['type'] == 'addRequest': | |||
self.add_operations += 1 | |||
elif message['type'] == 'bindRequest': | |||
self.bind_operations += 1 | |||
elif message['type'] == 'compareRequest': | |||
self.compare_operations += 1 | |||
elif message['type'] == 'delRequest': | |||
self.delete_operations += 1 | |||
elif message['type'] == 'extendedReq': | |||
self.extended_operations += 1 | |||
elif message['type'] == 'modifyRequest': | |||
self.modify_operations += 1 | |||
elif message['type'] == 'modDNRequest': | |||
self.modify_dn_operations += 1 | |||
elif message['type'] == 'searchRequest': | |||
self.search_operations += 1 | |||
elif message['type'] == 'unbindRequest': | |||
self.unbind_operations += 1 | |||
else: | |||
if log_enabled(ERROR): | |||
log(ERROR, 'unable to collect usage for unknown message type <%s>', message['type']) | |||
raise LDAPMetricsError('unable to collect usage for unknown message type') | |||
def update_received_message(self, length): | |||
self.last_received_time = datetime.now() | |||
self.bytes_received += length | |||
self.messages_received += 1 | |||
def start(self, reset=True): | |||
if reset: | |||
self.reset() | |||
self.open_socket_start_time = datetime.now() | |||
self.connection_stop_time = None | |||
if not self.initial_connection_start_time: | |||
self.initial_connection_start_time = self.open_socket_start_time | |||
if log_enabled(BASIC): | |||
log(BASIC, 'start collecting usage metrics') | |||
def stop(self): | |||
if self.open_socket_start_time: | |||
self.connection_stop_time = datetime.now() | |||
if log_enabled(BASIC): | |||
log(BASIC, 'stop collecting usage metrics') | |||
@property | |||
def elapsed_time(self): | |||
if self.connection_stop_time: | |||
return self.connection_stop_time - self.open_socket_start_time | |||
else: | |||
return (datetime.now() - self.open_socket_start_time) if self.open_socket_start_time else timedelta(0) |
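# Usage sketch for ConnectionUsage (illustrative only; server, DN and password are placeholder assumptions).
from ldap3 import Server, Connection
conn = Connection(Server('ldap.example.com'),
                  user='cn=admin,dc=example,dc=com',
                  password='secret',
                  collect_usage=True)                # attaches a ConnectionUsage instance to the connection
conn.bind()
conn.search('dc=example,dc=com', '(objectclass=person)', attributes=['cn'])
conn.unbind()
print(conn.usage)                                    # formatted counters produced by ConnectionUsage.__repr__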
@@ -0,0 +1,289 @@ | |||
""" | |||
""" | |||
# Created on 2014.04.28 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from os import linesep | |||
from .. import SUBTREE, DEREF_ALWAYS, ALL_ATTRIBUTES, DEREF_NEVER | |||
from .microsoft.dirSync import DirSync | |||
from .microsoft.modifyPassword import ad_modify_password | |||
from .microsoft.unlockAccount import ad_unlock_account | |||
from .microsoft.addMembersToGroups import ad_add_members_to_groups | |||
from .microsoft.removeMembersFromGroups import ad_remove_members_from_groups | |||
from .novell.partition_entry_count import PartitionEntryCount | |||
from .novell.replicaInfo import ReplicaInfo | |||
from .novell.listReplicas import ListReplicas | |||
from .novell.getBindDn import GetBindDn | |||
from .novell.nmasGetUniversalPassword import NmasGetUniversalPassword | |||
from .novell.nmasSetUniversalPassword import NmasSetUniversalPassword | |||
from .novell.startTransaction import StartTransaction | |||
from .novell.endTransaction import EndTransaction | |||
from .novell.addMembersToGroups import edir_add_members_to_groups | |||
from .novell.removeMembersFromGroups import edir_remove_members_from_groups | |||
from .novell.checkGroupsMemberships import edir_check_groups_memberships | |||
from .standard.whoAmI import WhoAmI | |||
from .standard.modifyPassword import ModifyPassword | |||
from .standard.PagedSearch import paged_search_generator, paged_search_accumulator | |||
from .standard.PersistentSearch import PersistentSearch | |||
class ExtendedOperationContainer(object): | |||
def __init__(self, connection): | |||
self._connection = connection | |||
def __repr__(self): | |||
return linesep.join([' ' + element for element in dir(self) if element[0] != '_']) | |||
def __str__(self): | |||
return self.__repr__() | |||
class StandardExtendedOperations(ExtendedOperationContainer): | |||
def who_am_i(self, controls=None): | |||
return WhoAmI(self._connection, | |||
controls).send() | |||
def modify_password(self, | |||
user=None, | |||
old_password=None, | |||
new_password=None, | |||
hash_algorithm=None, | |||
salt=None, | |||
controls=None): | |||
return ModifyPassword(self._connection, | |||
user, | |||
old_password, | |||
new_password, | |||
hash_algorithm, | |||
salt, | |||
controls).send() | |||
def paged_search(self, | |||
search_base, | |||
search_filter, | |||
search_scope=SUBTREE, | |||
dereference_aliases=DEREF_ALWAYS, | |||
attributes=None, | |||
size_limit=0, | |||
time_limit=0, | |||
types_only=False, | |||
get_operational_attributes=False, | |||
controls=None, | |||
paged_size=100, | |||
paged_criticality=False, | |||
generator=True): | |||
if generator: | |||
return paged_search_generator(self._connection, | |||
search_base, | |||
search_filter, | |||
search_scope, | |||
dereference_aliases, | |||
attributes, | |||
size_limit, | |||
time_limit, | |||
types_only, | |||
get_operational_attributes, | |||
controls, | |||
paged_size, | |||
paged_criticality) | |||
else: | |||
return paged_search_accumulator(self._connection, | |||
search_base, | |||
search_filter, | |||
search_scope, | |||
dereference_aliases, | |||
attributes, | |||
size_limit, | |||
time_limit, | |||
types_only, | |||
get_operational_attributes, | |||
controls, | |||
paged_size, | |||
paged_criticality) | |||
def persistent_search(self, | |||
search_base='', | |||
search_filter='(objectclass=*)', | |||
search_scope=SUBTREE, | |||
dereference_aliases=DEREF_NEVER, | |||
attributes=ALL_ATTRIBUTES, | |||
size_limit=0, | |||
time_limit=0, | |||
controls=None, | |||
changes_only=True, | |||
show_additions=True, | |||
show_deletions=True, | |||
show_modifications=True, | |||
show_dn_modifications=True, | |||
notifications=True, | |||
streaming=True, | |||
callback=None | |||
): | |||
events_type = 0 | |||
if show_additions: | |||
events_type += 1 | |||
if show_deletions: | |||
events_type += 2 | |||
if show_modifications: | |||
events_type += 4 | |||
if show_dn_modifications: | |||
events_type += 8 | |||
if callback: | |||
streaming = False | |||
return PersistentSearch(self._connection, | |||
search_base, | |||
search_filter, | |||
search_scope, | |||
dereference_aliases, | |||
attributes, | |||
size_limit, | |||
time_limit, | |||
controls, | |||
changes_only, | |||
events_type, | |||
notifications, | |||
streaming, | |||
callback) | |||
class NovellExtendedOperations(ExtendedOperationContainer): | |||
def get_bind_dn(self, controls=None): | |||
return GetBindDn(self._connection, | |||
controls).send() | |||
def get_universal_password(self, user, controls=None): | |||
return NmasGetUniversalPassword(self._connection, | |||
user, | |||
controls).send() | |||
def set_universal_password(self, user, new_password=None, controls=None): | |||
return NmasSetUniversalPassword(self._connection, | |||
user, | |||
new_password, | |||
controls).send() | |||
def list_replicas(self, server_dn, controls=None): | |||
return ListReplicas(self._connection, | |||
server_dn, | |||
controls).send() | |||
def partition_entry_count(self, partition_dn, controls=None): | |||
return PartitionEntryCount(self._connection, | |||
partition_dn, | |||
controls).send() | |||
def replica_info(self, server_dn, partition_dn, controls=None): | |||
return ReplicaInfo(self._connection, | |||
server_dn, | |||
partition_dn, | |||
controls).send() | |||
def start_transaction(self, controls=None): | |||
return StartTransaction(self._connection, | |||
controls).send() | |||
def end_transaction(self, commit=True, controls=None): # attach the groupingControl to commit, None to abort transaction | |||
return EndTransaction(self._connection, | |||
commit, | |||
controls).send() | |||
def add_members_to_groups(self, members, groups, fix=True, transaction=True): | |||
return edir_add_members_to_groups(self._connection, | |||
members_dn=members, | |||
groups_dn=groups, | |||
fix=fix, | |||
transaction=transaction) | |||
def remove_members_from_groups(self, members, groups, fix=True, transaction=True): | |||
return edir_remove_members_from_groups(self._connection, | |||
members_dn=members, | |||
groups_dn=groups, | |||
fix=fix, | |||
transaction=transaction) | |||
def check_groups_memberships(self, members, groups, fix=False, transaction=True): | |||
return edir_check_groups_memberships(self._connection, | |||
members_dn=members, | |||
groups_dn=groups, | |||
fix=fix, | |||
transaction=transaction) | |||
class MicrosoftExtendedOperations(ExtendedOperationContainer): | |||
def dir_sync(self, | |||
sync_base, | |||
sync_filter='(objectclass=*)', | |||
attributes=ALL_ATTRIBUTES, | |||
cookie=None, | |||
object_security=False, | |||
ancestors_first=True, | |||
public_data_only=False, | |||
incremental_values=True, | |||
max_length=2147483647, | |||
hex_guid=False): | |||
return DirSync(self._connection, | |||
sync_base=sync_base, | |||
sync_filter=sync_filter, | |||
attributes=attributes, | |||
cookie=cookie, | |||
object_security=object_security, | |||
ancestors_first=ancestors_first, | |||
public_data_only=public_data_only, | |||
incremental_values=incremental_values, | |||
max_length=max_length, | |||
hex_guid=hex_guid) | |||
def modify_password(self, user, new_password, old_password=None, controls=None): | |||
return ad_modify_password(self._connection, | |||
user, | |||
new_password, | |||
old_password, | |||
controls) | |||
def unlock_account(self, user): | |||
return ad_unlock_account(self._connection, | |||
user) | |||
def add_members_to_groups(self, members, groups, fix=True): | |||
return ad_add_members_to_groups(self._connection, | |||
members_dn=members, | |||
groups_dn=groups, | |||
fix=fix) | |||
def remove_members_from_groups(self, members, groups, fix=True): | |||
return ad_remove_members_from_groups(self._connection, | |||
members_dn=members, | |||
groups_dn=groups, | |||
fix=fix) | |||
class ExtendedOperationsRoot(ExtendedOperationContainer): | |||
def __init__(self, connection): | |||
ExtendedOperationContainer.__init__(self, connection) # calls super | |||
self.standard = StandardExtendedOperations(self._connection) | |||
self.novell = NovellExtendedOperations(self._connection) | |||
self.microsoft = MicrosoftExtendedOperations(self._connection) |
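# Usage sketch for the extend namespace defined above (illustrative only; server and base DN are
# placeholder assumptions).
from ldap3 import Server, Connection, ALL
conn = Connection(Server('ldap.example.com', get_info=ALL), auto_bind=True)
print(conn.extend.standard.who_am_i())               # RFC 4532 "Who am I?" extended operation
# generator=True (the default) yields entries lazily, fetching paged_size entries per request
for entry in conn.extend.standard.paged_search(search_base='dc=example,dc=com',
                                               search_filter='(objectclass=person)',
                                               attributes=['cn'],
                                               paged_size=100):
    print(entry['dn'])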
@@ -0,0 +1,81 @@ | |||
""" | |||
""" | |||
# Created on 2016.12.26 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2016 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ...core.exceptions import LDAPInvalidDnError | |||
from ... import SEQUENCE_TYPES, MODIFY_ADD, BASE, DEREF_NEVER | |||
def ad_add_members_to_groups(connection, | |||
members_dn, | |||
groups_dn, | |||
fix=True): | |||
""" | |||
:param connection: a bound Connection object | |||
:param members_dn: the list of members to add to groups | |||
:param groups_dn: the list of groups where members are to be added | |||
:param fix: checks for group existence and already assigned members | |||
:return: a boolean where True means that the operation was successful and False means an error has happened | |||
Establishes users-groups relations following the Active Directory rules: users are added to the member attribute of groups. | |||
Raises LDAPInvalidDnError if members or groups are not found in the DIT. | |||
""" | |||
if not isinstance(members_dn, SEQUENCE_TYPES): | |||
members_dn = [members_dn] | |||
if not isinstance(groups_dn, SEQUENCE_TYPES): | |||
groups_dn = [groups_dn] | |||
error = False | |||
for group in groups_dn: | |||
        if fix:  # checks for existence of the group and for already assigned members
result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member']) | |||
if not connection.strategy.sync: | |||
response, result = connection.get_response(result) | |||
else: | |||
response, result = connection.response, connection.result | |||
if not result['description'] == 'success': | |||
raise LDAPInvalidDnError(group + ' not found') | |||
existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else [] | |||
existing_members = [element.lower() for element in existing_members] | |||
else: | |||
existing_members = [] | |||
changes = dict() | |||
member_to_add = [element for element in members_dn if element.lower() not in existing_members] | |||
if member_to_add: | |||
changes['member'] = (MODIFY_ADD, member_to_add) | |||
if changes: | |||
result = connection.modify(group, changes) | |||
if not connection.strategy.sync: | |||
_, result = connection.get_response(result) | |||
else: | |||
result = connection.result | |||
if result['description'] != 'success': | |||
error = True | |||
break | |||
return not error # returns True if no error is raised in the LDAP operations |
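# Usage sketch for ad_add_members_to_groups via the extend.microsoft namespace (illustrative only;
# server, credentials and DNs are placeholder assumptions).
from ldap3 import Server, Connection, NTLM
conn = Connection(Server('dc01.example.com', use_ssl=True),
                  user='EXAMPLE\\admin', password='secret',
                  authentication=NTLM, auto_bind=True)
ok = conn.extend.microsoft.add_members_to_groups(
    'cn=jdoe,ou=users,dc=example,dc=com',            # a single DN or a list of member DNs
    'cn=staff,ou=groups,dc=example,dc=com',          # a single DN or a list of group DNs
    fix=True)                                        # skip members already present in the group
print(ok)                                            # True if every MODIFY_ADD succeeded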
@@ -0,0 +1,91 @@ | |||
""" | |||
""" | |||
# Created on 2015.10.21 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2015 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ...core.exceptions import LDAPExtensionError | |||
from ...protocol.microsoft import dir_sync_control, extended_dn_control, show_deleted_control | |||
from ... import SUBTREE, DEREF_NEVER | |||
from ...utils.dn import safe_dn | |||
class DirSync(object): | |||
def __init__(self, | |||
connection, | |||
sync_base, | |||
sync_filter, | |||
attributes, | |||
cookie, | |||
object_security, | |||
ancestors_first, | |||
public_data_only, | |||
incremental_values, | |||
max_length, | |||
hex_guid | |||
): | |||
self.connection = connection | |||
if self.connection.check_names and sync_base: | |||
            self.base = safe_dn(sync_base)
else: | |||
self.base = sync_base | |||
self.filter = sync_filter | |||
self.attributes = attributes | |||
self.cookie = cookie | |||
self.object_security = object_security | |||
self.ancestors_first = ancestors_first | |||
self.public_data_only = public_data_only | |||
self.incremental_values = incremental_values | |||
self.max_length = max_length | |||
self.hex_guid = hex_guid | |||
self.more_results = True | |||
def loop(self): | |||
result = self.connection.search(search_base=self.base, | |||
search_filter=self.filter, | |||
search_scope=SUBTREE, | |||
attributes=self.attributes, | |||
dereference_aliases=DEREF_NEVER, | |||
controls=[dir_sync_control(criticality=True, | |||
object_security=self.object_security, | |||
ancestors_first=self.ancestors_first, | |||
public_data_only=self.public_data_only, | |||
incremental_values=self.incremental_values, | |||
max_length=self.max_length, cookie=self.cookie), | |||
extended_dn_control(criticality=False, hex_format=self.hex_guid), | |||
show_deleted_control(criticality=False)] | |||
) | |||
if not self.connection.strategy.sync: | |||
response, result = self.connection.get_response(result) | |||
else: | |||
response = self.connection.response | |||
result = self.connection.result | |||
if result['description'] == 'success' and 'controls' in result and '1.2.840.113556.1.4.841' in result['controls']: | |||
self.more_results = result['controls']['1.2.840.113556.1.4.841']['value']['more_results'] | |||
self.cookie = result['controls']['1.2.840.113556.1.4.841']['value']['cookie'] | |||
return response | |||
elif 'controls' in result: | |||
raise LDAPExtensionError('Missing DirSync control in response from server') | |||
else: | |||
raise LDAPExtensionError('error %r in DirSync' % result) | |||
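# Usage sketch for the DirSync class above (illustrative only; server, credentials and base DN are
# placeholder assumptions).
from ldap3 import Server, Connection, NTLM
conn = Connection(Server('dc01.example.com', use_ssl=True),
                  user='EXAMPLE\\admin', password='secret',
                  authentication=NTLM, auto_bind=True)
sync = conn.extend.microsoft.dir_sync(sync_base='dc=example,dc=com',
                                      sync_filter='(objectclass=user)',
                                      attributes=['cn', 'memberOf'])
while sync.more_results:                             # loop() refreshes more_results and cookie from the DirSync control
    for entry in sync.loop():
        if entry.get('type') == 'searchResEntry':
            print(entry['dn'], entry['attributes'])
# sync.cookie can be persisted and passed back as cookie=... to resume an incremental synchronization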
@@ -0,0 +1,72 @@ | |||
""" | |||
""" | |||
# Created on 2015.11.27 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2015 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ... import MODIFY_REPLACE, MODIFY_DELETE, MODIFY_ADD | |||
from ...utils.log import log, log_enabled, PROTOCOL | |||
from ...core.results import RESULT_SUCCESS | |||
from ...utils.dn import safe_dn | |||
from ...utils.conv import to_unicode | |||
def ad_modify_password(connection, user_dn, new_password, old_password, controls=None): | |||
# old password must be None to reset password with sufficient privileges | |||
if connection.check_names: | |||
user_dn = safe_dn(user_dn) | |||
if str is bytes: # python2, converts to unicode | |||
new_password = to_unicode(new_password) | |||
if old_password: | |||
old_password = to_unicode(old_password) | |||
encoded_new_password = ('"%s"' % new_password).encode('utf-16-le') | |||
if old_password: # normal users must specify old and new password | |||
encoded_old_password = ('"%s"' % old_password).encode('utf-16-le') | |||
result = connection.modify(user_dn, | |||
{'unicodePwd': [(MODIFY_DELETE, [encoded_old_password]), | |||
(MODIFY_ADD, [encoded_new_password])]}, | |||
controls) | |||
else: # admin users can reset password without sending the old one | |||
result = connection.modify(user_dn, | |||
{'unicodePwd': [(MODIFY_REPLACE, [encoded_new_password])]}, | |||
controls) | |||
if not connection.strategy.sync: | |||
_, result = connection.get_response(result) | |||
else: | |||
result = connection.result | |||
# change successful, returns True | |||
if result['result'] == RESULT_SUCCESS: | |||
return True | |||
# change was not successful: raises an exception if raise_exceptions=True on the connection, otherwise returns False; the error code is in result['result']
if connection.raise_exceptions: | |||
from ...core.exceptions import LDAPOperationResult | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'operation result <%s> for <%s>', result, connection) | |||
raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type']) | |||
return False |
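# Usage sketch (illustrative, not part of the file above). ad_modify_password()
# encodes the new password as a quoted UTF-16-LE value for the Active Directory
# unicodePwd attribute: with old_password=None the bound identity performs an
# administrative reset, with both passwords a normal self-service change. These
# helpers are normally reached through the connection.extend.microsoft
# namespace. Server name, credentials and DNs below are placeholders, and AD
# only accepts unicodePwd modifications over an encrypted (LDAPS or
# signed/sealed) session.
from ldap3 import Server, Connection, NTLM

admin_conn = Connection(Server('ldaps://dc.example.org'), 'EXAMPLE\\svc_admin',
                        'admin-secret', authentication=NTLM, auto_bind=True)
# administrative reset: the old password is omitted
reset_ok = ad_modify_password(admin_conn, 'CN=jdoe,OU=Staff,DC=example,DC=org',
                              'N3w-Secret!', None)
# self-service change: both old and new password are supplied
change_ok = ad_modify_password(admin_conn, 'CN=jdoe,OU=Staff,DC=example,DC=org',
                               'N3w3r-Secret!', 'N3w-Secret!')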
@@ -0,0 +1,93 @@ | |||
""" | |||
""" | |||
# Created on 2016.12.26 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2016 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ...core.exceptions import LDAPInvalidDnError | |||
from ... import SEQUENCE_TYPES, MODIFY_DELETE, BASE, DEREF_NEVER | |||
from ...utils.dn import safe_dn | |||
def ad_remove_members_from_groups(connection, | |||
members_dn, | |||
groups_dn, | |||
fix): | |||
""" | |||
:param connection: a bound Connection object | |||
:param members_dn: the list of members to remove from groups | |||
:param groups_dn: the list of groups where members are to be removed | |||
:param fix: checks for group existence and existing members | |||
:return: a boolean where True means that the operation was successful and False means an error has happened | |||
Removes users-groups relations following the Active Directory rules: users are removed from the groups' 'member' attribute
""" | |||
if not isinstance(members_dn, SEQUENCE_TYPES): | |||
members_dn = [members_dn] | |||
if not isinstance(groups_dn, SEQUENCE_TYPES): | |||
groups_dn = [groups_dn] | |||
if connection.check_names: # builds new lists with sanitized dn | |||
safe_members_dn = [] | |||
safe_groups_dn = [] | |||
for member_dn in members_dn: | |||
safe_members_dn.append(safe_dn(member_dn)) | |||
for group_dn in groups_dn: | |||
safe_groups_dn.append(safe_dn(group_dn)) | |||
members_dn = safe_members_dn | |||
groups_dn = safe_groups_dn | |||
error = False | |||
for group in groups_dn: | |||
if fix: # checks for existence of the group and for already assigned members
result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member']) | |||
if not connection.strategy.sync: | |||
response, result = connection.get_response(result) | |||
else: | |||
response, result = connection.response, connection.result | |||
if not result['description'] == 'success': | |||
raise LDAPInvalidDnError(group + ' not found') | |||
existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else [] | |||
else: | |||
existing_members = members_dn | |||
existing_members = [element.lower() for element in existing_members] | |||
changes = dict() | |||
member_to_remove = [element for element in members_dn if element.lower() in existing_members] | |||
if member_to_remove: | |||
changes['member'] = (MODIFY_DELETE, member_to_remove) | |||
if changes: | |||
result = connection.modify(group, changes) | |||
if not connection.strategy.sync: | |||
_, result = connection.get_response(result) | |||
else: | |||
result = connection.result | |||
if result['description'] != 'success': | |||
error = True | |||
break | |||
return not error |
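# Usage sketch (illustrative, not part of the file above). Only the group's
# 'member' attribute is touched, mirroring how Active Directory stores
# membership; with fix=True the group is read first so only members that are
# actually present get removed. The DNs are placeholders and admin_conn is the
# hypothetical bound connection from the ad_modify_password sketch.
removed = ad_remove_members_from_groups(
    admin_conn,
    ['CN=jdoe,OU=Staff,DC=example,DC=org'],        # members_dn
    ['CN=Project-X,OU=Groups,DC=example,DC=org'],  # groups_dn
    fix=True)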
@@ -0,0 +1,56 @@ | |||
""" | |||
""" | |||
# Created on 2016.11.01 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2015 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ... import MODIFY_REPLACE | |||
from ...utils.log import log, log_enabled, PROTOCOL | |||
from ...core.results import RESULT_SUCCESS | |||
from ...utils.dn import safe_dn | |||
def ad_unlock_account(connection, user_dn, controls=None): | |||
if connection.check_names: | |||
user_dn = safe_dn(user_dn) | |||
result = connection.modify(user_dn, | |||
{'lockoutTime': [(MODIFY_REPLACE, ['0'])]}, | |||
controls) | |||
if not connection.strategy.sync: | |||
_, result = connection.get_response(result) | |||
else: | |||
result = connection.result | |||
# change successful, returns True | |||
if result['result'] == RESULT_SUCCESS: | |||
return True | |||
# change was not successful: raises an exception if raise_exceptions=True on the connection, otherwise returns the operation result; the error code is in result['result']
if connection.raise_exceptions: | |||
from ...core.exceptions import LDAPOperationResult | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'operation result <%s> for <%s>', result, connection) | |||
raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type']) | |||
return result |
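# Usage sketch (illustrative, not part of the file above). Writing '0' to
# lockoutTime clears an Active Directory account lockout. On success the helper
# returns True; on failure it raises (when raise_exceptions is set on the
# connection) or returns the raw operation result. DN and connection are the
# placeholders used in the earlier sketches.
unlocked = ad_unlock_account(admin_conn, 'CN=jdoe,OU=Staff,DC=example,DC=org')
if unlocked is not True:
    print('unlock failed:', unlocked)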
@@ -0,0 +1,153 @@ | |||
""" | |||
""" | |||
# Created on 2016.04.16 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2016 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ...core.exceptions import LDAPInvalidDnError | |||
from ... import SEQUENCE_TYPES, MODIFY_ADD, BASE, DEREF_NEVER | |||
from ...utils.dn import safe_dn | |||
def edir_add_members_to_groups(connection, | |||
members_dn, | |||
groups_dn, | |||
fix, | |||
transaction): | |||
""" | |||
:param connection: a bound Connection object | |||
:param members_dn: the list of members to add to groups | |||
:param groups_dn: the list of groups where members are to be added | |||
:param fix: checks for inconsistencies in the users-groups relation and fixes them
:param transaction: activates an LDAP transaction | |||
:return: a boolean where True means that the operation was successful and False means an error has happened | |||
Establishes users-groups relations following the eDirectory rules: groups are added to securityEquals and groupMembership | |||
attributes in the member object while members are added to member and equivalentToMe attributes in the group object. | |||
Raises LDAPInvalidDnError if members or groups are not found in the DIT. | |||
""" | |||
if not isinstance(members_dn, SEQUENCE_TYPES): | |||
members_dn = [members_dn] | |||
if not isinstance(groups_dn, SEQUENCE_TYPES): | |||
groups_dn = [groups_dn] | |||
transaction_control = None | |||
error = False | |||
if connection.check_names: # builds new lists with sanitized dn | |||
safe_members_dn = [] | |||
safe_groups_dn = [] | |||
for member_dn in members_dn: | |||
safe_members_dn.append(safe_dn(member_dn)) | |||
for group_dn in groups_dn: | |||
safe_groups_dn.append(safe_dn(group_dn)) | |||
members_dn = safe_members_dn | |||
groups_dn = safe_groups_dn | |||
if transaction: | |||
transaction_control = connection.extend.novell.start_transaction() | |||
if not error: | |||
for member in members_dn: | |||
if fix: # checks for existence of the member and for already assigned groups
result = connection.search(member, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['securityEquals', 'groupMembership']) | |||
if not connection.strategy.sync: | |||
response, result = connection.get_response(result) | |||
else: | |||
response, result = connection.response, connection.result | |||
if not result['description'] == 'success': | |||
raise LDAPInvalidDnError(member + ' not found') | |||
existing_security_equals = response[0]['attributes']['securityEquals'] if 'securityEquals' in response[0]['attributes'] else [] | |||
existing_group_membership = response[0]['attributes']['groupMembership'] if 'groupMembership' in response[0]['attributes'] else [] | |||
existing_security_equals = [element.lower() for element in existing_security_equals] | |||
existing_group_membership = [element.lower() for element in existing_group_membership] | |||
else: | |||
existing_security_equals = [] | |||
existing_group_membership = [] | |||
changes = dict() | |||
security_equals_to_add = [element for element in groups_dn if element.lower() not in existing_security_equals] | |||
group_membership_to_add = [element for element in groups_dn if element.lower() not in existing_group_membership] | |||
if security_equals_to_add: | |||
changes['securityEquals'] = (MODIFY_ADD, security_equals_to_add) | |||
if group_membership_to_add: | |||
changes['groupMembership'] = (MODIFY_ADD, group_membership_to_add) | |||
if changes: | |||
result = connection.modify(member, changes, controls=[transaction_control] if transaction else None) | |||
if not connection.strategy.sync: | |||
_, result = connection.get_response(result) | |||
else: | |||
result = connection.result | |||
if result['description'] != 'success': | |||
error = True | |||
break | |||
if not error: | |||
for group in groups_dn: | |||
if fix: # checks for existence of the group and for already assigned members
result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member', 'equivalentToMe']) | |||
if not connection.strategy.sync: | |||
response, result = connection.get_response(result) | |||
else: | |||
response, result = connection.response, connection.result | |||
if not result['description'] == 'success': | |||
raise LDAPInvalidDnError(group + ' not found') | |||
existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else [] | |||
existing_equivalent_to_me = response[0]['attributes']['equivalentToMe'] if 'equivalentToMe' in response[0]['attributes'] else [] | |||
existing_members = [element.lower() for element in existing_members] | |||
existing_equivalent_to_me = [element.lower() for element in existing_equivalent_to_me] | |||
else: | |||
existing_members = [] | |||
existing_equivalent_to_me = [] | |||
changes = dict() | |||
member_to_add = [element for element in members_dn if element.lower() not in existing_members] | |||
equivalent_to_me_to_add = [element for element in members_dn if element.lower() not in existing_equivalent_to_me] | |||
if member_to_add: | |||
changes['member'] = (MODIFY_ADD, member_to_add) | |||
if equivalent_to_me_to_add: | |||
changes['equivalentToMe'] = (MODIFY_ADD, equivalent_to_me_to_add) | |||
if changes: | |||
result = connection.modify(group, changes, controls=[transaction_control] if transaction else None) | |||
if not connection.strategy.sync: | |||
_, result = connection.get_response(result) | |||
else: | |||
result = connection.result | |||
if result['description'] != 'success': | |||
error = True | |||
break | |||
if transaction: | |||
if error: # aborts transaction in case of error in the modify operations | |||
result = connection.extend.novell.end_transaction(commit=False, controls=[transaction_control]) | |||
else: | |||
result = connection.extend.novell.end_transaction(commit=True, controls=[transaction_control]) | |||
if result['description'] != 'success': | |||
error = True | |||
return not error # returns True if no error is raised in the LDAP operations |
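# Usage sketch (illustrative, not part of the file above). eDirectory keeps the
# relation on both sides (securityEquals/groupMembership on the user,
# member/equivalentToMe on the group), which is why both objects are modified;
# with transaction=True the writes are grouped in a Novell LDAP transaction and
# rolled back on error. These helpers back the connection.extend.novell
# namespace. Host, credentials and DNs are placeholders.
from ldap3 import Server, Connection

edir_conn = Connection(Server('ldaps://edir.example.org'), 'cn=admin,o=example',
                       'admin-secret', auto_bind=True)
added = edir_add_members_to_groups(
    edir_conn,
    ['cn=jdoe,ou=users,o=example'],         # members_dn
    ['cn=developers,ou=groups,o=example'],  # groups_dn
    fix=True,
    transaction=True)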
@@ -0,0 +1,172 @@ | |||
""" | |||
""" | |||
# Created on 2016.05.14 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2016 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from .addMembersToGroups import edir_add_members_to_groups | |||
from ...core.exceptions import LDAPInvalidDnError | |||
from ... import SEQUENCE_TYPES, BASE, DEREF_NEVER | |||
from ...utils.dn import safe_dn | |||
def _check_members_have_memberships(connection, | |||
members_dn, | |||
groups_dn): | |||
""" | |||
:param connection: a bound Connection object | |||
:param members_dn: the list of members to add to groups | |||
:param groups_dn: the list of groups where members are to be added | |||
:return: two booleans. The first is True when all members have membership in all groups; the second is True when
there are inconsistencies in the securityEquals attribute
Checks the users' group memberships.
Raises LDAPInvalidDnError if a member is not found in the DIT.
""" | |||
if not isinstance(members_dn, SEQUENCE_TYPES): | |||
members_dn = [members_dn] | |||
if not isinstance(groups_dn, SEQUENCE_TYPES): | |||
groups_dn = [groups_dn] | |||
partial = False # True when a member has groupMembership but doesn't have securityEquals | |||
for member in members_dn: | |||
result = connection.search(member, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['groupMembership', 'securityEquals']) | |||
if not connection.strategy.sync: | |||
response, result = connection.get_response(result) | |||
else: | |||
response, result = connection.response, connection.result | |||
if not result['description'] == 'success': # member not found in DIT | |||
raise LDAPInvalidDnError(member + ' not found') | |||
existing_security_equals = response[0]['attributes']['securityEquals'] if 'securityEquals' in response[0]['attributes'] else [] | |||
existing_group_membership = response[0]['attributes']['groupMembership'] if 'groupMembership' in response[0]['attributes'] else [] | |||
existing_security_equals = [element.lower() for element in existing_security_equals] | |||
existing_group_membership = [element.lower() for element in existing_group_membership] | |||
for group in groups_dn: | |||
if group.lower() not in existing_group_membership: | |||
return False, False | |||
if group.lower() not in existing_security_equals: | |||
partial = True | |||
return True, partial | |||
def _check_groups_contain_members(connection, | |||
groups_dn, | |||
members_dn): | |||
""" | |||
:param connection: a bound Connection object | |||
:param members_dn: the list of members to add to groups | |||
:param groups_dn: the list of groups where members are to be added | |||
:return: two booleans. The first is True when all groups contain all the members; the second is True when
there are inconsistencies in the equivalentToMe attribute
Checks if groups have members in their 'member' attribute.
Raises LDAPInvalidDnError if a group is not found in the DIT.
""" | |||
if not isinstance(groups_dn, SEQUENCE_TYPES): | |||
groups_dn = [groups_dn] | |||
if not isinstance(members_dn, SEQUENCE_TYPES): | |||
members_dn = [members_dn] | |||
partial = False # True when a group has member but doesn't have equivalentToMe | |||
for group in groups_dn: | |||
result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member', 'equivalentToMe']) | |||
if not connection.strategy.sync: | |||
response, result = connection.get_response(result) | |||
else: | |||
response, result = connection.response, connection.result | |||
if not result['description'] == 'success': | |||
raise LDAPInvalidDnError(group + ' not found') | |||
existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else [] | |||
existing_equivalent_to_me = response[0]['attributes']['equivalentToMe'] if 'equivalentToMe' in response[0]['attributes'] else [] | |||
existing_members = [element.lower() for element in existing_members] | |||
existing_equivalent_to_me = [element.lower() for element in existing_equivalent_to_me] | |||
for member in members_dn: | |||
if member.lower() not in existing_members: | |||
return False, False | |||
if member.lower() not in existing_equivalent_to_me: | |||
partial = True | |||
return True, partial | |||
def edir_check_groups_memberships(connection, | |||
members_dn, | |||
groups_dn, | |||
fix, | |||
transaction): | |||
""" | |||
:param connection: a bound Connection object | |||
:param members_dn: the list of members to check | |||
:param groups_dn: the list of groups to check | |||
:param fix: checks for inconsistencies in the users-groups relation and fixes them
:param transaction: activates an LDAP transaction when fixing | |||
:return: a boolean where True means that the operation was successful and False means an error has happened | |||
Checks and fixes users-groups relations following the eDirectory rules: groups are checked against 'groupMembership' | |||
attribute in the member object while members are checked against 'member' attribute in the group object. | |||
Raises LDAPInvalidDnError if members or groups are not found in the DIT. | |||
""" | |||
if not isinstance(groups_dn, SEQUENCE_TYPES): | |||
groups_dn = [groups_dn] | |||
if not isinstance(members_dn, SEQUENCE_TYPES): | |||
members_dn = [members_dn] | |||
if connection.check_names: # builds new lists with sanitized dn | |||
safe_members_dn = [] | |||
safe_groups_dn = [] | |||
for member_dn in members_dn: | |||
safe_members_dn.append(safe_dn(member_dn)) | |||
for group_dn in groups_dn: | |||
safe_groups_dn.append(safe_dn(group_dn)) | |||
members_dn = safe_members_dn | |||
groups_dn = safe_groups_dn | |||
try: | |||
members_have_memberships, partial_member_security = _check_members_have_memberships(connection, members_dn, groups_dn) | |||
groups_contain_members, partial_group_security = _check_groups_contain_members(connection, groups_dn, members_dn) | |||
except LDAPInvalidDnError: | |||
return False | |||
if not members_have_memberships and not groups_contain_members: | |||
return False | |||
if fix: # fix any inconsistences | |||
if (members_have_memberships and not groups_contain_members) \ | |||
or (groups_contain_members and not members_have_memberships) \ | |||
or partial_group_security \ | |||
or partial_member_security: | |||
for member in members_dn: | |||
for group in groups_dn: | |||
edir_add_members_to_groups(connection, member, group, True, transaction) | |||
return True |
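# Usage sketch (illustrative, not part of the file above). Both directions of
# the relation are checked; with fix=True any pair whose securityEquals or
# equivalentToMe side is missing is repaired by re-running
# edir_add_members_to_groups(). Placeholders as in the previous eDirectory
# sketch.
consistent = edir_check_groups_memberships(
    edir_conn,
    ['cn=jdoe,ou=users,o=example'],
    ['cn=developers,ou=groups,o=example'],
    fix=True,
    transaction=False)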
@@ -0,0 +1,58 @@ | |||
""" | |||
""" | |||
# Created on 2016.04.14 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2016 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ...extend.operation import ExtendedOperation | |||
from ...protocol.novell import EndGroupTypeRequestValue, EndGroupTypeResponseValue, Sequence | |||
from ...utils.asn1 import decoder | |||
class EndTransaction(ExtendedOperation): | |||
def config(self): | |||
self.request_name = '2.16.840.1.113719.1.27.103.2' | |||
self.response_name = '2.16.840.1.113719.1.27.103.2' | |||
self.request_value = EndGroupTypeRequestValue() | |||
self.asn1_spec = EndGroupTypeResponseValue() | |||
def __init__(self, connection, commit=True, controls=None): | |||
if controls and len(controls) == 1: | |||
group_cookie = decoder.decode(controls[0][2], asn1Spec=Sequence())[0][0] # get the cookie from the built groupingControl | |||
else: | |||
group_cookie = None | |||
controls = None | |||
ExtendedOperation.__init__(self, connection, controls) # calls super __init__() | |||
if group_cookie: | |||
self.request_value['endGroupCookie'] = group_cookie # transactionGroupingType | |||
if not commit: | |||
self.request_value['endGroupValue'] = '' # an empty endGroupValue means abort transaction | |||
def populate_result(self): | |||
try: | |||
self.result['value'] = self.decoded_response['endGroupValue'] | |||
except TypeError: | |||
self.result['value'] = None | |||
def set_response(self): | |||
self.response_value = self.result |
@@ -0,0 +1,41 @@ | |||
""" | |||
""" | |||
# Created on 2014.04.30 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ...protocol.novell import Identity | |||
from ...extend.operation import ExtendedOperation | |||
class GetBindDn(ExtendedOperation): | |||
def config(self): | |||
self.request_name = '2.16.840.1.113719.1.27.100.31' | |||
self.response_name = '2.16.840.1.113719.1.27.100.32' | |||
self.response_attribute = 'identity' | |||
self.asn1_spec = Identity() | |||
def populate_result(self): | |||
try: | |||
self.result['identity'] = str(self.decoded_response) if self.decoded_response else None | |||
except TypeError: | |||
self.result['identity'] = None |
@@ -0,0 +1,50 @@ | |||
""" | |||
""" | |||
# Created on 2014.07.03 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ...extend.operation import ExtendedOperation | |||
from ...protocol.novell import ReplicaList | |||
from ...protocol.rfc4511 import LDAPDN | |||
from ...utils.dn import safe_dn | |||
class ListReplicas(ExtendedOperation): | |||
def config(self): | |||
self.request_name = '2.16.840.1.113719.1.27.100.19' | |||
self.response_name = '2.16.840.1.113719.1.27.100.20' | |||
self.request_value = LDAPDN() | |||
self.asn1_spec = ReplicaList() | |||
self.response_attribute = 'replicas' | |||
def __init__(self, connection, server_dn, controls=None): | |||
ExtendedOperation.__init__(self, connection, controls) # calls super __init__() | |||
if connection.check_names: | |||
server_dn = safe_dn(server_dn) | |||
self.request_value = LDAPDN(server_dn) | |||
def populate_result(self): | |||
try: | |||
self.result['replicas'] = str(self.decoded_response['replicaList']) if self.decoded_response['replicaList'] else None | |||
except TypeError: | |||
self.result['replicas'] = None |
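# Usage sketch (illustrative, not part of the file above). ListReplicas asks an
# eDirectory server object for the list of replicas it holds; send() is
# inherited from ExtendedOperation (operation.py later in this listing) and
# returns the 'replicas' response attribute, or None. Server DN and connection
# are placeholders.
replicas = ListReplicas(edir_conn, 'cn=edir-srv1,ou=servers,o=example').send()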
@@ -0,0 +1,56 @@ | |||
""" | |||
""" | |||
# Created on 2014.07.03 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ...extend.operation import ExtendedOperation | |||
from ...protocol.novell import NmasGetUniversalPasswordRequestValue, NmasGetUniversalPasswordResponseValue, NMAS_LDAP_EXT_VERSION | |||
from ...utils.dn import safe_dn | |||
class NmasGetUniversalPassword(ExtendedOperation): | |||
def config(self): | |||
self.request_name = '2.16.840.1.113719.1.39.42.100.13' | |||
self.response_name = '2.16.840.1.113719.1.39.42.100.14' | |||
self.request_value = NmasGetUniversalPasswordRequestValue() | |||
self.asn1_spec = NmasGetUniversalPasswordResponseValue() | |||
self.response_attribute = 'password' | |||
def __init__(self, connection, user, controls=None): | |||
ExtendedOperation.__init__(self, connection, controls) # calls super __init__() | |||
if connection.check_names: | |||
user = safe_dn(user) | |||
self.request_value['nmasver'] = NMAS_LDAP_EXT_VERSION | |||
self.request_value['reqdn'] = user | |||
def populate_result(self): | |||
if self.decoded_response: | |||
self.result['nmasver'] = int(self.decoded_response['nmasver']) | |||
self.result['error'] = int(self.decoded_response['err']) | |||
try: | |||
self.result['password'] = str(self.decoded_response['passwd']) if self.decoded_response['passwd'].hasValue() else None | |||
except TypeError: | |||
self.result['password'] = None |
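# Usage sketch (illustrative, not part of the file above). The NMAS extension
# reads a user's Universal Password; the bound identity needs the matching NMAS
# rights and an encrypted session. send() returns the configured
# response_attribute, here 'password'. DN and connection are placeholders.
current_pwd = NmasGetUniversalPassword(edir_conn,
                                       'cn=jdoe,ou=users,o=example').send()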
@@ -0,0 +1,52 @@ | |||
""" | |||
""" | |||
# Created on 2014.07.03 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ...extend.operation import ExtendedOperation | |||
from ...protocol.novell import NmasSetUniversalPasswordRequestValue, NmasSetUniversalPasswordResponseValue, NMAS_LDAP_EXT_VERSION | |||
from ...utils.dn import safe_dn | |||
class NmasSetUniversalPassword(ExtendedOperation): | |||
def config(self): | |||
self.request_name = '2.16.840.1.113719.1.39.42.100.11' | |||
self.response_name = '2.16.840.1.113719.1.39.42.100.12' | |||
self.request_value = NmasSetUniversalPasswordRequestValue() | |||
self.asn1_spec = NmasSetUniversalPasswordResponseValue() | |||
self.response_attribute = 'password' | |||
def __init__(self, connection, user, new_password, controls=None): | |||
ExtendedOperation.__init__(self, connection, controls) # calls super __init__() | |||
if connection.check_names and user: | |||
user = safe_dn(user) | |||
self.request_value['nmasver'] = NMAS_LDAP_EXT_VERSION | |||
if user: | |||
self.request_value['reqdn'] = user | |||
if new_password: | |||
self.request_value['new_passwd'] = new_password | |||
def populate_result(self): | |||
self.result['nmasver'] = int(self.decoded_response['nmasver']) | |||
self.result['error'] = int(self.decoded_response['err']) |
@@ -0,0 +1,57 @@ | |||
""" | |||
""" | |||
# Created on 2014.08.05 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from pyasn1.type.univ import Integer | |||
from ...core.exceptions import LDAPExtensionError | |||
from ..operation import ExtendedOperation | |||
from ...protocol.rfc4511 import LDAPDN | |||
from ...utils.asn1 import decoder | |||
from ...utils.dn import safe_dn | |||
class PartitionEntryCount(ExtendedOperation): | |||
def config(self): | |||
self.request_name = '2.16.840.1.113719.1.27.100.13' | |||
self.response_name = '2.16.840.1.113719.1.27.100.14' | |||
self.request_value = LDAPDN() | |||
self.response_attribute = 'entry_count' | |||
def __init__(self, connection, partition_dn, controls=None): | |||
ExtendedOperation.__init__(self, connection, controls) # calls super __init__() | |||
if connection.check_names: | |||
partition_dn = safe_dn(partition_dn) | |||
self.request_value = LDAPDN(partition_dn) | |||
def populate_result(self): | |||
substrate = self.decoded_response | |||
try: | |||
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer()) | |||
self.result['entry_count'] = int(decoded) | |||
except Exception: | |||
raise LDAPExtensionError('unable to decode substrate') | |||
if substrate: | |||
raise LDAPExtensionError('unknown substrate remaining') |
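# Usage sketch (illustrative, not part of the file above). The response is a
# BER-encoded Integer that populate_result() decodes above, so send() yields
# the number of entries held in the partition. Partition DN and connection are
# placeholders.
entry_count = PartitionEntryCount(edir_conn, 'o=example').send()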
@@ -0,0 +1,156 @@ | |||
""" | |||
""" | |||
# Created on 2016.04.17 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2016 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ...core.exceptions import LDAPInvalidDnError | |||
from ... import SEQUENCE_TYPES, MODIFY_DELETE, BASE, DEREF_NEVER | |||
from ...utils.dn import safe_dn | |||
def edir_remove_members_from_groups(connection, | |||
members_dn, | |||
groups_dn, | |||
fix, | |||
transaction): | |||
""" | |||
:param connection: a bound Connection object | |||
:param members_dn: the list of members to remove from groups | |||
:param groups_dn: the list of groups where members are to be removed | |||
:param fix: checks for inconsistencies in the users-groups relation and fixes them
:param transaction: activates an LDAP transaction | |||
:return: a boolean where True means that the operation was successful and False means an error has happened | |||
Removes users-groups relations following the eDirectory rules: groups are removed from securityEquals and groupMembership | |||
attributes in the member object while members are removed from member and equivalentToMe attributes in the group object. | |||
Raises LDAPInvalidDnError if members or groups are not found in the DIT. | |||
""" | |||
if not isinstance(members_dn, SEQUENCE_TYPES): | |||
members_dn = [members_dn] | |||
if not isinstance(groups_dn, SEQUENCE_TYPES): | |||
groups_dn = [groups_dn] | |||
if connection.check_names: # builds new lists with sanitized dn | |||
safe_members_dn = [] | |||
safe_groups_dn = [] | |||
for member_dn in members_dn: | |||
safe_members_dn.append(safe_dn(member_dn)) | |||
for group_dn in groups_dn: | |||
safe_groups_dn.append(safe_dn(group_dn)) | |||
members_dn = safe_members_dn | |||
groups_dn = safe_groups_dn | |||
transaction_control = None | |||
error = False | |||
if transaction: | |||
transaction_control = connection.extend.novell.start_transaction() | |||
if not error: | |||
for member in members_dn: | |||
if fix: # checks for existence of the member and for already assigned groups
result = connection.search(member, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['securityEquals', 'groupMembership']) | |||
if not connection.strategy.sync: | |||
response, result = connection.get_response(result) | |||
else: | |||
response, result = connection.response, connection.result | |||
if not result['description'] == 'success': | |||
raise LDAPInvalidDnError(member + ' not found') | |||
existing_security_equals = response[0]['attributes']['securityEquals'] if 'securityEquals' in response[0]['attributes'] else [] | |||
existing_group_membership = response[0]['attributes']['groupMembership'] if 'groupMembership' in response[0]['attributes'] else [] | |||
else: | |||
existing_security_equals = groups_dn | |||
existing_group_membership = groups_dn | |||
existing_security_equals = [element.lower() for element in existing_security_equals] | |||
existing_group_membership = [element.lower() for element in existing_group_membership] | |||
changes = dict() | |||
security_equals_to_remove = [element for element in groups_dn if element.lower() in existing_security_equals] | |||
group_membership_to_remove = [element for element in groups_dn if element.lower() in existing_group_membership] | |||
if security_equals_to_remove: | |||
changes['securityEquals'] = (MODIFY_DELETE, security_equals_to_remove) | |||
if group_membership_to_remove: | |||
changes['groupMembership'] = (MODIFY_DELETE, group_membership_to_remove) | |||
if changes: | |||
result = connection.modify(member, changes, controls=[transaction_control] if transaction else None) | |||
if not connection.strategy.sync: | |||
_, result = connection.get_response(result) | |||
else: | |||
result = connection.result | |||
if result['description'] != 'success': | |||
error = True | |||
break | |||
if not error: | |||
for group in groups_dn: | |||
if fix: # checks for existence of the group and for already assigned members
result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member', 'equivalentToMe']) | |||
if not connection.strategy.sync: | |||
response, result = connection.get_response(result) | |||
else: | |||
response, result = connection.response, connection.result | |||
if not result['description'] == 'success': | |||
raise LDAPInvalidDnError(group + ' not found') | |||
existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else [] | |||
existing_equivalent_to_me = response[0]['attributes']['equivalentToMe'] if 'equivalentToMe' in response[0]['attributes'] else [] | |||
else: | |||
existing_members = members_dn | |||
existing_equivalent_to_me = members_dn | |||
existing_members = [element.lower() for element in existing_members] | |||
existing_equivalent_to_me = [element.lower() for element in existing_equivalent_to_me] | |||
changes = dict() | |||
member_to_remove = [element for element in members_dn if element.lower() in existing_members] | |||
equivalent_to_me_to_remove = [element for element in members_dn if element.lower() in existing_equivalent_to_me] | |||
if member_to_remove: | |||
changes['member'] = (MODIFY_DELETE, member_to_remove) | |||
if equivalent_to_me_to_remove: | |||
changes['equivalentToMe'] = (MODIFY_DELETE, equivalent_to_me_to_remove) | |||
if changes: | |||
result = connection.modify(group, changes, controls=[transaction_control] if transaction else None) | |||
if not connection.strategy.sync: | |||
_, result = connection.get_response(result) | |||
else: | |||
result = connection.result | |||
if result['description'] != 'success': | |||
error = True | |||
break | |||
if transaction: | |||
if error: # aborts transaction in case of error in the modify operations | |||
result = connection.extend.novell.end_transaction(commit=False, controls=[transaction_control]) | |||
else: | |||
result = connection.extend.novell.end_transaction(commit=True, controls=[transaction_control]) | |||
if result['description'] != 'success': | |||
error = True | |||
return not error # return True if no error is raised in the LDAP operations |
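# Usage sketch (illustrative, not part of the file above). The inverse of
# edir_add_members_to_groups(): the relation is removed from the user side
# (securityEquals, groupMembership) and from the group side (member,
# equivalentToMe), optionally inside a transaction. Placeholders as in the
# earlier eDirectory sketches.
removed = edir_remove_members_from_groups(
    edir_conn,
    ['cn=jdoe,ou=users,o=example'],
    ['cn=developers,ou=groups,o=example'],
    fix=True,
    transaction=True)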
@@ -0,0 +1,79 @@ | |||
""" | |||
""" | |||
# Created on 2014.08.07 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from datetime import datetime | |||
from pyasn1.type.univ import Integer | |||
from ...core.exceptions import LDAPExtensionError | |||
from ...protocol.novell import LDAPDN, ReplicaInfoRequestValue | |||
from ..operation import ExtendedOperation | |||
from ...utils.asn1 import decoder | |||
from ...utils.dn import safe_dn | |||
class ReplicaInfo(ExtendedOperation): | |||
def config(self): | |||
self.request_name = '2.16.840.1.113719.1.27.100.17' | |||
self.response_name = '2.16.840.1.113719.1.27.100.18' | |||
# self.asn1_spec = ReplicaInfoResponseValue() | |||
self.request_value = ReplicaInfoRequestValue() | |||
self.response_attribute = 'partition_dn' | |||
def __init__(self, connection, server_dn, partition_dn, controls=None): | |||
if connection.check_names: | |||
if server_dn: | |||
server_dn = safe_dn(server_dn) | |||
if partition_dn: | |||
partition_dn = safe_dn(partition_dn) | |||
ExtendedOperation.__init__(self, connection, controls) # calls super __init__() | |||
self.request_value['server_dn'] = server_dn | |||
self.request_value['partition_dn'] = partition_dn | |||
def populate_result(self): | |||
substrate = self.decoded_response | |||
try: | |||
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer()) | |||
self.result['partition_id'] = int(decoded) | |||
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer()) | |||
self.result['replica_state'] = int(decoded) | |||
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer()) | |||
self.result['modification_time'] = datetime.utcfromtimestamp(int(decoded)) | |||
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer()) | |||
self.result['purge_time'] = datetime.utcfromtimestamp(int(decoded)) | |||
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer()) | |||
self.result['local_partition_id'] = int(decoded) | |||
decoded, substrate = decoder.decode(substrate, asn1Spec=LDAPDN()) | |||
self.result['partition_dn'] = str(decoded) | |||
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer()) | |||
self.result['replica_type'] = int(decoded) | |||
decoded, substrate = decoder.decode(substrate, asn1Spec=Integer()) | |||
self.result['flags'] = int(decoded) | |||
except Exception: | |||
raise LDAPExtensionError('unable to decode substrate') | |||
if substrate: | |||
raise LDAPExtensionError('unknown substrate remaining') |
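# Usage sketch (illustrative, not part of the file above). populate_result()
# above decodes the replica status fields (partition id, state, modification
# and purge times, type, flags); send() returns the 'partition_dn' response
# attribute while the full detail stays in op.result. DNs and connection are
# placeholders.
op = ReplicaInfo(edir_conn, 'cn=edir-srv1,ou=servers,o=example', 'o=example')
op.send()
print(op.result['replica_state'], op.result['modification_time'])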
@@ -0,0 +1,56 @@ | |||
""" | |||
""" | |||
# Created on 2016.04.14 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2016 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ...extend.operation import ExtendedOperation | |||
from ...protocol.novell import CreateGroupTypeRequestValue, CreateGroupTypeResponseValue, GroupingControlValue | |||
from ...protocol.controls import build_control | |||
class StartTransaction(ExtendedOperation): | |||
def config(self): | |||
self.request_name = '2.16.840.1.113719.1.27.103.1' | |||
self.response_name = '2.16.840.1.113719.1.27.103.1' | |||
self.request_value = CreateGroupTypeRequestValue() | |||
self.asn1_spec = CreateGroupTypeResponseValue() | |||
def __init__(self, connection, controls=None): | |||
ExtendedOperation.__init__(self, connection, controls) # calls super __init__() | |||
self.request_value['createGroupType'] = '2.16.840.1.113719.1.27.103.7' # transactionGroupingType | |||
def populate_result(self): | |||
self.result['cookie'] = int(self.decoded_response['createGroupCookie']) | |||
try: | |||
self.result['value'] = self.decoded_response['createGroupValue'] | |||
except TypeError: | |||
self.result['value'] = None | |||
def set_response(self): | |||
try: | |||
grouping_cookie_value = GroupingControlValue() | |||
grouping_cookie_value['groupingCookie'] = self.result['cookie'] | |||
self.response_value = build_control('2.16.840.1.113719.1.27.103.7', True, grouping_cookie_value, encode_control_value=True) # groupingControl | |||
except TypeError: | |||
self.response_value = None | |||
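# Usage sketch (illustrative, not part of the file above). StartTransaction
# returns a groupingControl carrying the transaction cookie; every write tagged
# with that control belongs to the transaction, and EndTransaction (earlier in
# this listing) commits or aborts it. This is the same pairing the
# edir_*_members_*_groups helpers use via connection.extend.novell
# (start_transaction/end_transaction). DN and connection are placeholders.
from ldap3 import MODIFY_REPLACE

txn_control = StartTransaction(edir_conn).send()   # groupingControl with cookie
edir_conn.modify('cn=jdoe,ou=users,o=example',
                 {'description': [(MODIFY_REPLACE, ['updated inside the txn'])]},
                 controls=[txn_control])
EndTransaction(edir_conn, commit=True, controls=[txn_control]).send()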
@@ -0,0 +1,91 @@ | |||
""" | |||
""" | |||
# Created on 2014.07.04 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ..core.results import RESULT_SUCCESS | |||
from ..core.exceptions import LDAPExtensionError | |||
from ..utils.asn1 import decoder | |||
class ExtendedOperation(object): | |||
def __init__(self, connection, controls=None): | |||
self.connection = connection | |||
self.decoded_response = None | |||
self.result = None | |||
self.asn1_spec = None # if None the response_value is returned without encoding | |||
self.request_name = None | |||
self.response_name = None | |||
self.request_value = None | |||
self.response_value = None | |||
self.response_attribute = None | |||
self.controls = controls | |||
self.config() | |||
def send(self): | |||
if self.connection.check_names and self.connection.server.info is not None and self.connection.server.info.supported_extensions is not None: # checks if extension is supported | |||
for request_name in self.connection.server.info.supported_extensions: | |||
if request_name[0] == self.request_name: | |||
break | |||
else: | |||
raise LDAPExtensionError('extension not in DSA list of supported extensions') | |||
resp = self.connection.extended(self.request_name, self.request_value, self.controls) | |||
if not self.connection.strategy.sync: | |||
_, self.result = self.connection.get_response(resp) | |||
else: | |||
self.result = self.connection.result | |||
self.decode_response() | |||
self.populate_result() | |||
self.set_response() | |||
return self.response_value | |||
def populate_result(self): | |||
pass | |||
def decode_response(self): | |||
if not self.result: | |||
return None | |||
if self.result['result'] not in [RESULT_SUCCESS]: | |||
if self.connection.raise_exceptions: | |||
raise LDAPExtensionError('extended operation error: ' + self.result['description'] + ' - ' + self.result['message']) | |||
else: | |||
return None | |||
if not self.response_name or self.result['responseName'] == self.response_name: | |||
if self.result['responseValue']: | |||
if self.asn1_spec is not None: | |||
decoded, unprocessed = decoder.decode(self.result['responseValue'], asn1Spec=self.asn1_spec) | |||
if unprocessed: | |||
raise LDAPExtensionError('error decoding extended response value') | |||
self.decoded_response = decoded | |||
else: | |||
self.decoded_response = self.result['responseValue'] | |||
else: | |||
raise LDAPExtensionError('invalid response name received') | |||
def set_response(self): | |||
self.response_value = self.result[self.response_attribute] if self.result and self.response_attribute in self.result else None | |||
self.connection.response = self.response_value | |||
def config(self): | |||
pass |
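# Illustrative sketch (not part of the file above). A minimal ExtendedOperation
# subclass only needs config(); send() then checks the DSA's supportedExtension
# list, issues the extendedRequest and decodes the response. The "Who am I?"
# operation (RFC 4532) is used here because its responseValue is a raw authzId,
# so no asn1_spec is needed. ldap3 already ships this operation among the
# standard extensions; the class below only demonstrates the base-class hooks.
class WhoAmISketch(ExtendedOperation):
    def config(self):
        self.request_name = '1.3.6.1.4.1.4203.1.11.3'   # Who am I? request OID
        self.response_attribute = 'authzid'

    def populate_result(self):
        # asn1_spec is None, so decoded_response holds the raw responseValue
        value = self.decoded_response
        self.result['authzid'] = value.decode('utf-8') if isinstance(value, bytes) else value

# authz_id = WhoAmISketch(edir_conn).send()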
@@ -0,0 +1,125 @@ | |||
""" | |||
""" | |||
# Created on 2014.07.08 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ... import SUBTREE, DEREF_ALWAYS | |||
from ...utils.dn import safe_dn | |||
from ...core.results import DO_NOT_RAISE_EXCEPTIONS, RESULT_SIZE_LIMIT_EXCEEDED | |||
from ...core.exceptions import LDAPOperationResult | |||
from ...utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED | |||
def paged_search_generator(connection, | |||
search_base, | |||
search_filter, | |||
search_scope=SUBTREE, | |||
dereference_aliases=DEREF_ALWAYS, | |||
attributes=None, | |||
size_limit=0, | |||
time_limit=0, | |||
types_only=False, | |||
get_operational_attributes=False, | |||
controls=None, | |||
paged_size=100, | |||
paged_criticality=False): | |||
if connection.check_names and search_base: | |||
search_base = safe_dn(search_base) | |||
responses = [] | |||
cookie = True # performs search at least one time | |||
while cookie: | |||
result = connection.search(search_base, | |||
search_filter, | |||
search_scope, | |||
dereference_aliases, | |||
attributes, | |||
size_limit, | |||
time_limit, | |||
types_only, | |||
get_operational_attributes, | |||
controls, | |||
paged_size, | |||
paged_criticality, | |||
None if cookie is True else cookie) | |||
if not isinstance(result, bool): | |||
response, result = connection.get_response(result) | |||
else: | |||
response = connection.response | |||
result = connection.result | |||
responses.extend(response) | |||
try: | |||
cookie = result['controls']['1.2.840.113556.1.4.319']['value']['cookie'] | |||
except KeyError: | |||
cookie = None | |||
if result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS: | |||
if log_enabled(PROTOCOL): | |||
log(PROTOCOL, 'paged search operation result <%s> for <%s>', result, connection) | |||
if result['result'] == RESULT_SIZE_LIMIT_EXCEEDED: | |||
while responses: | |||
yield responses.pop() | |||
raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type']) | |||
while responses: | |||
yield responses.pop() | |||
connection.response = None | |||
def paged_search_accumulator(connection, | |||
search_base, | |||
search_filter, | |||
search_scope=SUBTREE, | |||
dereference_aliases=DEREF_ALWAYS, | |||
attributes=None, | |||
size_limit=0, | |||
time_limit=0, | |||
types_only=False, | |||
get_operational_attributes=False, | |||
controls=None, | |||
paged_size=100, | |||
paged_criticality=False): | |||
if connection.check_names and search_base: | |||
search_base = safe_dn(search_base) | |||
responses = [] | |||
for response in paged_search_generator(connection, | |||
search_base, | |||
search_filter, | |||
search_scope, | |||
dereference_aliases, | |||
attributes, | |||
size_limit, | |||
time_limit, | |||
types_only, | |||
get_operational_attributes, | |||
controls, | |||
paged_size, | |||
paged_criticality): | |||
responses.append(response) | |||
connection.response = responses | |||
return responses |
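# Usage sketch (illustrative, not part of the file above). The generator keeps
# a single page in memory and resubmits the search with the cookie from the
# 1.2.840.113556.1.4.319 control until the server returns an empty cookie; the
# accumulator simply collects every entry into connection.response. Base DN,
# filter and connection are placeholders.
for entry in paged_search_generator(admin_conn,
                                    'DC=example,DC=org',
                                    '(objectclass=user)',
                                    attributes=['cn', 'department'],
                                    paged_size=500):
    print(entry.get('dn'), entry.get('attributes'))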
@@ -0,0 +1,121 @@ | |||
""" | |||
""" | |||
# Created on 2016.07.08 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2016 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
try: | |||
from queue import Empty | |||
except ImportError: # Python 2 | |||
# noinspection PyUnresolvedReferences | |||
from Queue import Empty | |||
from ...core.exceptions import LDAPExtensionError | |||
from ...protocol.persistentSearch import persistent_search_control | |||
from ... import SEQUENCE_TYPES | |||
from ...utils.dn import safe_dn | |||
class PersistentSearch(object): | |||
def __init__(self, | |||
connection, | |||
search_base, | |||
search_filter, | |||
search_scope, | |||
dereference_aliases, | |||
attributes, | |||
size_limit, | |||
time_limit, | |||
controls, | |||
changes_only, | |||
events_type, | |||
notifications, | |||
streaming, | |||
callback | |||
): | |||
if connection.strategy.sync: | |||
raise LDAPExtensionError('Persistent Search needs an asynchronous streaming connection') | |||
if connection.check_names and search_base: | |||
search_base = safe_dn(search_base) | |||
self.connection = connection | |||
self.changes_only = changes_only | |||
self.notifications = notifications | |||
self.message_id = None | |||
self.base = search_base | |||
self.filter = search_filter | |||
self.scope = search_scope | |||
self.dereference_aliases = dereference_aliases | |||
self.attributes = attributes | |||
self.size_limit = size_limit | |||
self.time_limit = time_limit | |||
self.connection.strategy.streaming = streaming | |||
if callback and callable(callback): | |||
self.connection.strategy.callback = callback | |||
elif callback: | |||
raise LDAPExtensionError('callback is not callable') | |||
if not isinstance(controls, SEQUENCE_TYPES): | |||
self.controls = [] | |||
else: | |||
self.controls = controls | |||
self.controls.append(persistent_search_control(events_type, changes_only, notifications)) | |||
self.start() | |||
def start(self): | |||
if self.message_id: # persistent search already started | |||
return | |||
if not self.connection.bound: | |||
self.connection.bind() | |||
with self.connection.strategy.async_lock: | |||
self.message_id = self.connection.search(search_base=self.base, | |||
search_filter=self.filter, | |||
search_scope=self.scope, | |||
dereference_aliases=self.dereference_aliases, | |||
attributes=self.attributes, | |||
size_limit=self.size_limit, | |||
time_limit=self.time_limit, | |||
controls=self.controls) | |||
self.connection.strategy.persistent_search_message_id = self.message_id | |||
def stop(self): | |||
self.connection.abandon(self.message_id) | |||
self.connection.unbind() | |||
if self.message_id in self.connection.strategy._responses: | |||
del self.connection.strategy._responses[self.message_id] | |||
if hasattr(self.connection.strategy, '_requests') and self.message_id in self.connection.strategy._requests: # asynchronous strategies keep a dict of requests that can be returned by get_response()
del self.connection.strategy._requests[self.message_id] | |||
self.connection.strategy.persistent_search_message_id = None | |||
self.message_id = None | |||
def next(self): | |||
if not self.connection.strategy.streaming and not self.connection.strategy.callback: | |||
try: | |||
return self.connection.strategy.events.get_nowait() | |||
except Empty: | |||
return None | |||
raise LDAPExtensionError('Persistent search is not accumulating events in queue') |
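# --- Usage sketch (not part of the ldap3 sources above) ---
# A hedged example of driving PersistentSearch directly. It assumes the
# ASYNC_STREAM client strategy available in recent ldap3 releases (the class
# refuses synchronous strategies, as the constructor check above shows); the
# server address, credentials and the events_type bitmask (1=add, 2=delete,
# 4=modify, 8=modDN per the persistent search draft) are illustrative
# assumptions, not values taken from this repository.
from ldap3 import Server, Connection, ASYNC_STREAM, SUBTREE, DEREF_NEVER, ALL_ATTRIBUTES

conn = Connection(Server('ldap.example.com'),
                  'cn=admin,dc=example,dc=com', 'secret',
                  client_strategy=ASYNC_STREAM, auto_bind=True)
ps = PersistentSearch(conn,
                      search_base='dc=example,dc=com',
                      search_filter='(objectclass=person)',
                      search_scope=SUBTREE,
                      dereference_aliases=DEREF_NEVER,
                      attributes=ALL_ATTRIBUTES,
                      size_limit=0,
                      time_limit=0,
                      controls=None,
                      changes_only=True,
                      events_type=15,      # add | delete | modify | modDN
                      notifications=True,
                      streaming=False,     # queue events instead of streaming them
                      callback=None)
print(ps.next())                           # a queued change entry, or None if nothing has arrived yet
ps.stop()                                  # abandons the search and unbinds the connection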
@@ -0,0 +1,72 @@ | |||
""" | |||
""" | |||
# Created on 2014.04.30 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ... import HASHED_NONE | |||
from ...extend.operation import ExtendedOperation | |||
from ...protocol.rfc3062 import PasswdModifyRequestValue, PasswdModifyResponseValue | |||
from ...utils.hashed import hashed | |||
from ...protocol.sasl.sasl import validate_simple_password | |||
from ...utils.dn import safe_dn | |||
from ...core.results import RESULT_SUCCESS | |||
# implements RFC3062 | |||
class ModifyPassword(ExtendedOperation): | |||
def config(self): | |||
self.request_name = '1.3.6.1.4.1.4203.1.11.1' | |||
self.request_value = PasswdModifyRequestValue() | |||
self.asn1_spec = PasswdModifyResponseValue() | |||
self.response_attribute = 'new_password' | |||
def __init__(self, connection, user=None, old_password=None, new_password=None, hash_algorithm=None, salt=None, controls=None): | |||
ExtendedOperation.__init__(self, connection, controls) # calls super __init__() | |||
if user: | |||
if connection.check_names: | |||
user = safe_dn(user) | |||
self.request_value['userIdentity'] = user | |||
if old_password: | |||
if not isinstance(old_password, bytes): # bytes are returned raw, as per RFC (4.2) | |||
old_password = validate_simple_password(old_password, True) | |||
self.request_value['oldPasswd'] = old_password | |||
if new_password: | |||
if not isinstance(new_password, bytes): # bytes are returned raw, as per RFC (4.2) | |||
new_password = validate_simple_password(new_password, True) | |||
if hash_algorithm is None or hash_algorithm == HASHED_NONE: | |||
self.request_value['newPasswd'] = new_password | |||
else: | |||
self.request_value['newPasswd'] = hashed(hash_algorithm, new_password, salt) | |||
def populate_result(self): | |||
try: | |||
self.result[self.response_attribute] = str(self.decoded_response['genPasswd']) | |||
except TypeError: # optional field can be absent, so returns True if operation is successful else False | |||
if self.result['result'] == RESULT_SUCCESS: | |||
self.result[self.response_attribute] = True | |||
            else:  # change was not successful; raises an exception when raise_exceptions=True on the connection, otherwise the error code stays in result['result']
                self.result[self.response_attribute] = False
                if self.connection.raise_exceptions:
from ...core.exceptions import LDAPOperationResult | |||
raise LDAPOperationResult(result=self.result['result'], description=self.result['description'], dn=self.result['dn'], message=self.result['message'], response_type=self.result['type']) |
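# --- Usage sketch (not part of the ldap3 sources above) ---
# ModifyPassword is normally reached through the conn.extend.standard.modify_password()
# wrapper, which builds and sends the request defined above. Server address, DNs
# and passwords are illustrative assumptions. When new_password is omitted the
# server may generate one and return it in genPasswd, which populate_result()
# exposes as the 'new_password' attribute; otherwise the call yields True/False.
from ldap3 import Server, Connection

conn = Connection(Server('ldap.example.com'),
                  'cn=admin,dc=example,dc=com', 'secret', auto_bind=True)
# let the server pick a new password for the target entry
generated = conn.extend.standard.modify_password(user='cn=jdoe,ou=people,dc=example,dc=com')
print(generated)
# set an explicit password, supplying the old one for servers that require it
ok = conn.extend.standard.modify_password(user='cn=jdoe,ou=people,dc=example,dc=com',
                                          old_password='old-secret',
                                          new_password='new-secret')
print(ok, conn.result['description'])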
@@ -0,0 +1,40 @@ | |||
""" | |||
""" | |||
# Created on 2014.04.30 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2014 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
# implements RFC4532 | |||
from ...extend.operation import ExtendedOperation | |||
from ...utils.conv import to_unicode | |||
class WhoAmI(ExtendedOperation): | |||
def config(self): | |||
self.request_name = '1.3.6.1.4.1.4203.1.11.3' | |||
self.response_attribute = 'authzid' | |||
def populate_result(self): | |||
try: | |||
self.result['authzid'] = to_unicode(self.decoded_response) if self.decoded_response else None | |||
except TypeError: | |||
self.result['authzid'] = self.decoded_response if self.decoded_response else None |
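# --- Usage sketch (not part of the ldap3 sources above) ---
# WhoAmI is normally invoked through conn.extend.standard.who_am_i(); the server
# address and credentials are illustrative assumptions. The call returns the
# authorization identity of the bound user (typically a 'dn:...' or 'u:...'
# string), or None for an anonymous bind, mirroring populate_result() above.
from ldap3 import Server, Connection

conn = Connection(Server('ldap.example.com'),
                  'cn=admin,dc=example,dc=com', 'secret', auto_bind=True)
print(conn.extend.standard.who_am_i())   # e.g. 'dn:cn=admin,dc=example,dc=com'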
@@ -0,0 +1,36 @@ | |||
""" | |||
""" | |||
# Created on 2013.05.31 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2013 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from ..protocol.rfc4511 import AbandonRequest, MessageID | |||
def abandon_operation(msg_id): | |||
# AbandonRequest ::= [APPLICATION 16] MessageID | |||
request = AbandonRequest(MessageID(msg_id)) | |||
return request | |||
def abandon_request_to_dict(request): | |||
return {'messageId': str(request)} |
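# --- Usage sketch (not part of the ldap3 sources above) ---
# abandon_operation() is the wire-level builder; applications reach it through
# Connection.abandon(message_id), which only makes sense for an operation still
# in flight on an asynchronous connection. Server address, credentials and the
# search parameters are illustrative assumptions.
from ldap3 import Server, Connection, ASYNC, SUBTREE

conn = Connection(Server('ldap.example.com'),
                  'cn=admin,dc=example,dc=com', 'secret',
                  client_strategy=ASYNC, auto_bind=True)
msg_id = conn.search('dc=example,dc=com', '(objectclass=person)',
                     search_scope=SUBTREE)   # asynchronous strategies return the message id
conn.abandon(msg_id)                         # asks the server to drop the pending search; no response follows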
@@ -0,0 +1,72 @@ | |||
""" | |||
""" | |||
# Created on 2013.05.31 | |||
# | |||
# Author: Giovanni Cannata | |||
# | |||
# Copyright 2013 - 2018 Giovanni Cannata | |||
# | |||
# This file is part of ldap3. | |||
# | |||
# ldap3 is free software: you can redistribute it and/or modify | |||
# it under the terms of the GNU Lesser General Public License as published | |||
# by the Free Software Foundation, either version 3 of the License, or | |||
# (at your option) any later version. | |||
# | |||
# ldap3 is distributed in the hope that it will be useful, | |||
# but WITHOUT ANY WARRANTY; without even the implied warranty of | |||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | |||
# GNU Lesser General Public License for more details. | |||
# | |||
# You should have received a copy of the GNU Lesser General Public License | |||
# along with ldap3 in the COPYING and COPYING.LESSER files. | |||
# If not, see <http://www.gnu.org/licenses/>. | |||
from .. import SEQUENCE_TYPES | |||
from ..protocol.rfc4511 import AddRequest, LDAPDN, AttributeList, Attribute, AttributeDescription, ResultCode, Vals | |||
from ..protocol.convert import referrals_to_list, attributes_to_dict, validate_attribute_value, prepare_for_sending | |||
def add_operation(dn, | |||
attributes, | |||
auto_encode, | |||
schema=None, | |||
validator=None, | |||
check_names=False): | |||
# AddRequest ::= [APPLICATION 8] SEQUENCE { | |||
# entry LDAPDN, | |||
# attributes AttributeList } | |||
# | |||
# attributes is a dictionary in the form 'attribute': ['val1', 'val2', 'valN'] | |||
attribute_list = AttributeList() | |||
for pos, attribute in enumerate(attributes): | |||
attribute_list[pos] = Attribute() | |||
attribute_list[pos]['type'] = AttributeDescription(attribute) | |||
        vals = Vals()  # changed from ValsAtLeast1() to allow an empty member value in groups
if isinstance(attributes[attribute], SEQUENCE_TYPES): | |||
for index, value in enumerate(attributes[attribute]): | |||
vals.setComponentByPosition(index, prepare_for_sending(validate_attribute_value(schema, attribute, value, auto_encode, validator, check_names))) | |||
else: | |||
vals.setComponentByPosition(0, prepare_for_sending(validate_attribute_value(schema, attribute, attributes[attribute], auto_encode, validator, check_names))) | |||
attribute_list[pos]['vals'] = vals | |||
request = AddRequest() | |||
request['entry'] = LDAPDN(dn) | |||
request['attributes'] = attribute_list | |||
return request | |||
def add_request_to_dict(request): | |||
return {'entry': str(request['entry']), | |||
'attributes': attributes_to_dict(request['attributes'])} | |||
def add_response_to_dict(response): | |||
return {'result': int(response['resultCode']), | |||
'description': ResultCode().getNamedValues().getName(response['resultCode']), | |||
'dn': str(response['matchedDN']), | |||
'message': str(response['diagnosticMessage']), | |||
'referrals': referrals_to_list(response['referral'])} |
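# --- Usage sketch (not part of the ldap3 sources above) ---
# add_operation() builds the AddRequest shown above; applications normally call
# Connection.add(), passing the attributes dictionary in exactly the
# 'attribute': ['val1', 'val2', 'valN'] form (scalar values are accepted too).
# Server address, DN, object classes and attribute values are illustrative
# assumptions.
from ldap3 import Server, Connection

conn = Connection(Server('ldap.example.com'),
                  'cn=admin,dc=example,dc=com', 'secret', auto_bind=True)
conn.add('cn=jdoe,ou=people,dc=example,dc=com',
         object_class=['inetOrgPerson', 'organizationalPerson', 'person', 'top'],
         attributes={'sn': 'Doe',
                     'givenName': 'Jane',
                     'mail': ['jane.doe@example.com']})
print(conn.result['description'])   # 'success' when the entry was created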