-rw-r--r--  PKG-INFO | 3
-rw-r--r--  _version.json | 2
-rw-r--r--  ldap3.egg-info/PKG-INFO | 3
-rw-r--r--  ldap3.egg-info/SOURCES.txt | 3
-rw-r--r--  ldap3/__init__.py | 6
-rw-r--r--  ldap3/abstract/__init__.py | 2
-rw-r--r--  ldap3/abstract/attrDef.py | 2
-rw-r--r--  ldap3/abstract/attribute.py | 577
-rw-r--r--  ldap3/abstract/cursor.py | 1800
-rw-r--r--  ldap3/abstract/entry.py | 1361
-rw-r--r--  ldap3/abstract/objectDef.py | 31
-rw-r--r--  ldap3/core/connection.py | 3045
-rw-r--r--  ldap3/core/exceptions.py | 1206
-rw-r--r--  ldap3/core/pooling.py | 635
-rw-r--r--  ldap3/core/results.py | 4
-rw-r--r--  ldap3/core/server.py | 1235
-rw-r--r--  ldap3/core/timezone.py | 2
-rw-r--r--  ldap3/core/tls.py | 653
-rw-r--r--  ldap3/core/usage.py | 2
-rw-r--r--  ldap3/extend/__init__.py | 29
-rw-r--r--  ldap3/extend/microsoft/addMembersToGroups.py | 174
-rw-r--r--  ldap3/extend/microsoft/dirSync.py | 2
-rw-r--r--  ldap3/extend/microsoft/modifyPassword.py | 2
-rw-r--r--  ldap3/extend/microsoft/removeMembersFromGroups.py | 185
-rw-r--r--  ldap3/extend/microsoft/unlockAccount.py | 4
-rw-r--r--  ldap3/extend/novell/addMembersToGroups.py | 2
-rw-r--r--  ldap3/extend/novell/checkGroupsMemberships.py | 2
-rw-r--r--  ldap3/extend/novell/endTransaction.py | 2
-rw-r--r--  ldap3/extend/novell/getBindDn.py | 2
-rw-r--r--  ldap3/extend/novell/listReplicas.py | 4
-rw-r--r--  ldap3/extend/novell/nmasGetUniversalPassword.py | 16
-rw-r--r--  ldap3/extend/novell/nmasSetUniversalPassword.py | 2
-rw-r--r--  ldap3/extend/novell/partition_entry_count.py | 2
-rw-r--r--  ldap3/extend/novell/removeMembersFromGroups.py | 2
-rw-r--r--  ldap3/extend/novell/replicaInfo.py | 2
-rw-r--r--  ldap3/extend/novell/startTransaction.py | 2
-rw-r--r--  ldap3/extend/operation.py | 2
-rw-r--r--  ldap3/extend/standard/PagedSearch.py | 35
-rw-r--r--  ldap3/extend/standard/PersistentSearch.py | 28
-rw-r--r--  ldap3/extend/standard/modifyPassword.py | 4
-rw-r--r--  ldap3/extend/standard/whoAmI.py | 4
-rw-r--r--  ldap3/operation/abandon.py | 2
-rw-r--r--  ldap3/operation/add.py | 2
-rw-r--r--  ldap3/operation/bind.py | 8
-rw-r--r--  ldap3/operation/compare.py | 2
-rw-r--r--  ldap3/operation/delete.py | 2
-rw-r--r--  ldap3/operation/extended.py | 4
-rw-r--r--  ldap3/operation/modify.py | 2
-rw-r--r--  ldap3/operation/modifyDn.py | 2
-rw-r--r--  ldap3/operation/search.py | 50
-rw-r--r--  ldap3/operation/unbind.py | 2
-rw-r--r--  ldap3/protocol/controls.py | 2
-rw-r--r--  ldap3/protocol/convert.py | 33
-rw-r--r--  ldap3/protocol/formatters/formatters.py | 723
-rw-r--r--  ldap3/protocol/formatters/standard.py | 465
-rw-r--r--  ldap3/protocol/formatters/validators.py | 785
-rw-r--r--  ldap3/protocol/microsoft.py | 16
-rw-r--r--  ldap3/protocol/novell.py | 2
-rw-r--r--  ldap3/protocol/oid.py | 2
-rw-r--r--  ldap3/protocol/persistentSearch.py | 2
-rw-r--r--  ldap3/protocol/rfc2696.py | 2
-rw-r--r--  ldap3/protocol/rfc2849.py | 51
-rw-r--r--  ldap3/protocol/rfc3062.py | 2
-rw-r--r--  ldap3/protocol/rfc4511.py | 2
-rw-r--r--  ldap3/protocol/rfc4512.py | 24
-rw-r--r--  ldap3/protocol/rfc4527.py | 2
-rw-r--r--  ldap3/protocol/sasl/digestMd5.py | 4
-rw-r--r--  ldap3/protocol/sasl/external.py | 2
-rw-r--r--  ldap3/protocol/sasl/kerberos.py | 15
-rw-r--r--  ldap3/protocol/sasl/plain.py | 2
-rw-r--r--  ldap3/protocol/sasl/sasl.py | 2
-rw-r--r--  ldap3/protocol/schemas/ad2012R2.py | 2
-rw-r--r--  ldap3/protocol/schemas/ds389.py | 2
-rw-r--r--  ldap3/protocol/schemas/edir888.py | 9
-rw-r--r--  ldap3/protocol/schemas/edir914.py | 1157
-rw-r--r--  ldap3/protocol/schemas/slapd24.py | 2
-rw-r--r--  ldap3/strategy/asyncStream.py | 9
-rw-r--r--  ldap3/strategy/asynchronous.py | 474
-rw-r--r--  ldap3/strategy/base.py | 1776
-rw-r--r--  ldap3/strategy/ldifProducer.py | 298
-rw-r--r--  ldap3/strategy/mockAsync.py | 2
-rw-r--r--  ldap3/strategy/mockBase.py | 104
-rw-r--r--  ldap3/strategy/mockSync.py | 2
-rw-r--r--  ldap3/strategy/restartable.py | 52
-rw-r--r--  ldap3/strategy/reusable.py | 974
-rw-r--r--  ldap3/strategy/sync.py | 427
-rw-r--r--  ldap3/utils/asn1.py | 2
-rw-r--r--  ldap3/utils/ciDict.py | 29
-rw-r--r--  ldap3/utils/config.py | 43
-rw-r--r--  ldap3/utils/conv.py | 494
-rw-r--r--  ldap3/utils/dn.py | 762
-rw-r--r--  ldap3/utils/hashed.py | 2
-rw-r--r--  ldap3/utils/log.py | 12
-rw-r--r--  ldap3/utils/ntlm.py | 6
-rw-r--r--  ldap3/utils/port_validators.py | 37
-rw-r--r--  ldap3/utils/repr.py | 2
-rw-r--r--  ldap3/utils/tls_backport.py | 2
-rw-r--r--  ldap3/utils/uri.py | 2
-rw-r--r--  ldap3/version.py | 8
-rw-r--r--  setup.py | 111
-rw-r--r--  test/testAbandonOperation.py | 2
-rw-r--r--  test/testAbstractionAuxiliaryClass.py | 104
-rw-r--r--  test/testAbstractionDefs.py | 2
-rw-r--r--  test/testAbstractionDefsFromSchema.py | 2
-rw-r--r--  test/testAbstractionSearch.py | 2
-rw-r--r--  test/testAbstractionWrite.py | 3
-rw-r--r--  test/testAddMembersToGroups.py | 2
-rw-r--r--  test/testAddOperation.py | 26
-rw-r--r--  test/testBindOperation.py | 2
-rw-r--r--  test/testBytesOperation.py | 2
-rw-r--r--  test/testCaseInsensitiveDictionary.py | 2
-rw-r--r--  test/testCaseInsensitiveWithAliasDictionary.py | 14
-rw-r--r--  test/testCheckGroupMembership.py | 2
-rw-r--r--  test/testCheckNamesFalse.py | 2
-rw-r--r--  test/testCheckNamesTrue.py | 2
-rw-r--r--  test/testCheckedAttributes.py | 2
-rw-r--r--  test/testCompareOperation.py | 2
-rw-r--r--  test/testConnection.py | 2
-rw-r--r--  test/testControls.py | 2
-rw-r--r--  test/testDeleteOperation.py | 2
-rw-r--r--  test/testDnParsing.py | 316
-rw-r--r--  test/testExceptions.py | 2
-rw-r--r--  test/testExtendedOperations.py | 4
-rw-r--r--  test/testExtensions.py | 2
-rw-r--r--  test/testFormatGeneralizedTime.py | 2
-rw-r--r--  test/testLDIF-change.py | 2
-rw-r--r--  test/testLDIF-content.py | 2
-rw-r--r--  test/testMicrosoftAD.py | 8
-rw-r--r--  test/testMockASyncStrategy.py | 13
-rw-r--r--  test/testMockBase.py | 21
-rw-r--r--  test/testMockSyncStrategy.py | 78
-rw-r--r--  test/testModifyDNOperation.py | 2
-rw-r--r--  test/testModifyOperation.py | 2
-rw-r--r--  test/testOfflineSchema.py | 6
-rw-r--r--  test/testParseSearchFilter.py | 32
-rw-r--r--  test/testRebindOperation.py | 2
-rw-r--r--  test/testRemoveMembersFromGroups.py | 2
-rw-r--r--  test/testRestartable.py | 41
-rw-r--r--  test/testSaslPrep.py | 2
-rw-r--r--  test/testSchema.py | 2
-rw-r--r--  test/testSearchAndModifyEntries.py | 2
-rw-r--r--  test/testSearchOperation.py | 55
-rw-r--r--  test/testSearchOperationEntries.py | 2
-rw-r--r--  test/testSearchOperationJSON.py | 2
-rw-r--r--  test/testTls.py | 379
-rw-r--r--  test/testTransactions.py | 2
-rw-r--r--  test/testValidators.py | 366
-rw-r--r--  test/testWriterCursor.py | 2
148 files changed, 12052 insertions, 9573 deletions
diff --git a/PKG-INFO b/PKG-INFO
index 49c9c7b..6cabfde 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,12 +1,11 @@
Metadata-Version: 1.1
Name: ldap3
-Version: 2.4.1
+Version: 2.7
Summary: A strictly RFC 4510 conforming LDAP V3 pure Python client library
Home-page: https://github.com/cannatag/ldap3
Author: Giovanni Cannata
Author-email: cannatag@gmail.com
License: LGPL v3
-Description-Content-Type: UNKNOWN
Description: LDAP3
=====
diff --git a/_version.json b/_version.json
index a71850e..6ae435c 100644
--- a/_version.json
+++ b/_version.json
@@ -6,6 +6,6 @@
"url": "https://github.com/cannatag/ldap3",
"description": "A strictly RFC 4510 conforming LDAP V3 pure Python client library",
"author": "Giovanni Cannata",
- "version": "2.4.1",
+ "version": "2.7",
"license": "LGPL v3"
}
diff --git a/ldap3.egg-info/PKG-INFO b/ldap3.egg-info/PKG-INFO
index 49c9c7b..6cabfde 100644
--- a/ldap3.egg-info/PKG-INFO
+++ b/ldap3.egg-info/PKG-INFO
@@ -1,12 +1,11 @@
Metadata-Version: 1.1
Name: ldap3
-Version: 2.4.1
+Version: 2.7
Summary: A strictly RFC 4510 conforming LDAP V3 pure Python client library
Home-page: https://github.com/cannatag/ldap3
Author: Giovanni Cannata
Author-email: cannatag@gmail.com
License: LGPL v3
-Description-Content-Type: UNKNOWN
Description: LDAP3
=====
diff --git a/ldap3.egg-info/SOURCES.txt b/ldap3.egg-info/SOURCES.txt
index 10cfbbe..fb58879 100644
--- a/ldap3.egg-info/SOURCES.txt
+++ b/ldap3.egg-info/SOURCES.txt
@@ -86,6 +86,7 @@ setup.py
./ldap3/protocol/schemas/ad2012R2.py
./ldap3/protocol/schemas/ds389.py
./ldap3/protocol/schemas/edir888.py
+./ldap3/protocol/schemas/edir914.py
./ldap3/protocol/schemas/slapd24.py
./ldap3/strategy/__init__.py
./ldap3/strategy/asyncStream.py
@@ -108,6 +109,7 @@ setup.py
./ldap3/utils/log.py
./ldap3/utils/ntlm.py
./ldap3/utils/ordDict.py
+./ldap3/utils/port_validators.py
./ldap3/utils/repr.py
./ldap3/utils/tls_backport.py
./ldap3/utils/uri.py
@@ -117,6 +119,7 @@ ldap3.egg-info/dependency_links.txt
ldap3.egg-info/requires.txt
ldap3.egg-info/top_level.txt
test/testAbandonOperation.py
+test/testAbstractionAuxiliaryClass.py
test/testAbstractionDefs.py
test/testAbstractionDefsFromSchema.py
test/testAbstractionSearch.py
diff --git a/ldap3/__init__.py b/ldap3/__init__.py
index f9376c2..9bb5435 100644
--- a/ldap3/__init__.py
+++ b/ldap3/__init__.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -37,6 +37,7 @@ DIGEST_MD5 = 'DIGEST-MD5'
KERBEROS = GSSAPI = 'GSSAPI'
PLAIN = 'PLAIN'
+AUTO_BIND_DEFAULT = 'DEFAULT' # binds connection whens using "with" context manager
AUTO_BIND_NONE = 'NONE' # same as False
AUTO_BIND_NO_TLS = 'NO_TLS' # same as True
AUTO_BIND_TLS_BEFORE_BIND = 'TLS_BEFORE_BIND'
@@ -88,6 +89,7 @@ SCHEMA = 'SCHEMA'
ALL = 'ALL'
OFFLINE_EDIR_8_8_8 = 'EDIR_8_8_8'
+OFFLINE_EDIR_9_1_4 = 'EDIR_9_1_4'
OFFLINE_AD_2012_R2 = 'AD_2012_R2'
OFFLINE_SLAPD_2_4 = 'SLAPD_2_4'
OFFLINE_DS389_1_3_3 = 'DS389_1_3_3'
@@ -112,8 +114,10 @@ HASHED_SALTED_MD5 = 'SALTED_MD5'
if str is not bytes: # Python 3
NUMERIC_TYPES = (int, float)
+ INTEGER_TYPES = (int, )
else:
NUMERIC_TYPES = (int, long, float)
+ INTEGER_TYPES = (int, long)
# types for string and sequence
if str is not bytes: # Python 3
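The hunks above introduce three new module-level names: AUTO_BIND_DEFAULT (which, per its in-line comment, makes a connection bind when used in a "with" block), OFFLINE_EDIR_9_1_4 (selects the newly bundled eDirectory 9.1.4 offline schema) and INTEGER_TYPES. A minimal sketch of where such constants are typically consumed; the host, DN and password below are placeholders and are not taken from this commit:

    from ldap3 import Server, Connection, OFFLINE_EDIR_9_1_4

    # the new offline schema constant goes to Server(get_info=...) when the
    # eDirectory 9.1.4 schema should be loaded without querying the server
    server = Server('ldaps://edir.example.com', get_info=OFFLINE_EDIR_9_1_4)

    # per the AUTO_BIND_DEFAULT comment above, a connection used as a context
    # manager binds automatically on entry and unbinds on exit
    with Connection(server, user='cn=admin,o=example', password='secret') as conn:
        conn.search('o=example', '(objectClass=person)', attributes=['cn'])
        print(conn.entries)

diff --git a/ldap3/abstract/__init__.py b/ldap3/abstract/__init__.py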
diff --git a/ldap3/abstract/__init__.py b/ldap3/abstract/__init__.py
index c40f838..29aabb3 100644
--- a/ldap3/abstract/__init__.py
+++ b/ldap3/abstract/__init__.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/abstract/attrDef.py b/ldap3/abstract/attrDef.py
index d954e25..caffb53 100644
--- a/ldap3/abstract/attrDef.py
+++ b/ldap3/abstract/attrDef.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
diff --git a/ldap3/abstract/attribute.py b/ldap3/abstract/attribute.py
index 5d1fa97..5d33cc7 100644
--- a/ldap3/abstract/attribute.py
+++ b/ldap3/abstract/attribute.py
@@ -1,287 +1,290 @@
-"""
-"""
-
-# Created on 2014.01.06
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from os import linesep
-
-from .. import MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, SEQUENCE_TYPES
-from ..core.exceptions import LDAPCursorError
-from ..utils.repr import to_stdout_encoding
-from . import STATUS_PENDING_CHANGES, STATUS_VIRTUAL, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING
-from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
-
-
-# noinspection PyUnresolvedReferences
-class Attribute(object):
- """Attribute/values object, it includes the search result (after post_query transformation) of each attribute in an entry
-
- Attribute object is read only
-
- - values: contain the processed attribute values
- - raw_values': contain the unprocessed attribute values
-
-
- """
-
- def __init__(self, attr_def, entry, cursor):
- self.key = attr_def.key
- self.definition = attr_def
- self.values = []
- self.raw_values = []
- self.response = None
- self.entry = entry
- self.cursor = cursor
- other_names = [name for name in attr_def.oid_info.name if self.key.lower() != name.lower()] if attr_def.oid_info else None
- self.other_names = set(other_names) if other_names else None # self.other_names is None if there are no short names, else is a set of secondary names
-
- def __repr__(self):
- if len(self.values) == 1:
- r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
- elif len(self.values) > 1:
- r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
- filler = ' ' * (len(self.key) + 6)
- for value in self.values[1:]:
- r += linesep + filler + to_stdout_encoding(value)
- else:
- r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding('<no value>')
-
- return r
-
- def __str__(self):
- if len(self.values) == 1:
- return to_stdout_encoding(self.values[0])
- else:
- return to_stdout_encoding(self.values)
-
- def __len__(self):
- return len(self.values)
-
- def __iter__(self):
- return self.values.__iter__()
-
- def __getitem__(self, item):
- return self.values[item]
-
- def __eq__(self, other):
- try:
- if self.value == other:
- return True
- except Exception:
- return False
-
- def __ne__(self, other):
- return not self == other
-
- @property
- def value(self):
- """
- :return: The single value or a list of values of the attribute.
- """
- if not self.values:
- return None
-
- return self.values[0] if len(self.values) == 1 else self.values
-
-
-class OperationalAttribute(Attribute):
- """Operational attribute/values object. Include the search result of an
- operational attribute in an entry
-
- OperationalAttribute object is read only
-
- - values: contains the processed attribute values
- - raw_values: contains the unprocessed attribute values
-
- It may not have an AttrDef
-
- """
-
- def __repr__(self):
- if len(self.values) == 1:
- r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0])
- elif len(self.values) > 1:
- r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0])
- filler = ' ' * (len(self.key) + 6)
- for value in sorted(self.values[1:]):
- r += linesep + filler + to_stdout_encoding(value)
- else:
- r = ''
-
- return r
-
-
-class WritableAttribute(Attribute):
- def __repr__(self):
- filler = ' ' * (len(self.key) + 6)
- if len(self.values) == 1:
- r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
- elif len(self.values) > 1:
- r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
- for value in self.values[1:]:
- r += linesep + filler + to_stdout_encoding(value)
- else:
- r = to_stdout_encoding(self.key) + to_stdout_encoding(': <Virtual>')
- if self.definition.name in self.entry._changes:
- r += linesep + filler + 'CHANGES: ' + str(self.entry._changes[self.definition.name])
- return r
-
- def __iadd__(self, other):
- self.add(other)
- return Ellipsis # hack to avoid calling set() in entry __setattr__
-
- def __isub__(self, other):
- self.delete(other)
- return Ellipsis # hack to avoid calling set_value in entry __setattr__
-
- def _update_changes(self, changes, remove_old=False):
- # checks for friendly key in AttrDef and uses the real attribute name
- if self.definition and self.definition.name:
- key = self.definition.name
- else:
- key = self.key
-
- if key not in self.entry._changes:
- self.entry._changes[key] = []
- elif remove_old:
- self.entry._changes[key] = [] # remove old changes (for removing attribute)
-
- self.entry._changes[key].append(changes)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'updated changes <%r> for <%s> attribute in <%s> entry', changes, self.key, self.entry.entry_dn)
- self.entry._state.set_status(STATUS_PENDING_CHANGES)
-
- def add(self, values):
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'adding %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
- # new value for attribute to commit with a MODIFY_ADD
- if self.entry._state._initial_status == STATUS_VIRTUAL:
- error_message = 'cannot add an attribute value in a new entry'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
- error_message = self.entry.entry_status + ' - cannot add attributes'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- if values is None:
- error_message = 'value to add cannot be None'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- if values is not None:
- validated = self.definition.validate(values) # returns True, False or a value to substitute to the actual values
- if validated is False:
- error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key)
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- elif validated is not True: # a valid LDAP value equivalent to the actual values
- values = validated
- self._update_changes((MODIFY_ADD, values if isinstance(values, SEQUENCE_TYPES) else [values]))
-
- def set(self, values):
- # new value for attribute to commit with a MODIFY_REPLACE, old values are deleted
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'setting %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
- if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
- error_message = self.entry.entry_status + ' - cannot set attributes'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- if values is None:
- error_message = 'new value cannot be None'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- validated = self.definition.validate(values) # returns True, False or a value to substitute to the actual values
- if validated is False:
- error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key)
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- elif validated is not True: # a valid LDAP value equivalent to the actual values
- values = validated
- self._update_changes((MODIFY_REPLACE, values if isinstance(values, SEQUENCE_TYPES) else [values]))
-
- def delete(self, values):
- # value for attribute to delete in commit with a MODIFY_DELETE
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'deleting %r from <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
- if self.entry._state._initial_status == STATUS_VIRTUAL:
- error_message = 'cannot delete an attribute value in a new entry'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
- error_message = self.entry.entry_status + ' - cannot delete attributes'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- if values is None:
- error_message = 'value to delete cannot be None'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- if not isinstance(values, SEQUENCE_TYPES):
- values = [values]
- for single_value in values:
- if single_value not in self.values:
- error_message = 'value \'%s\' not present in \'%s\'' % (single_value, ', '.join(self.values))
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- self._update_changes((MODIFY_DELETE, values))
-
- def remove(self):
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'removing <%s> attribute in <%s> entry', self.key, self.entry.entry_dn)
- if self.entry._state._initial_status == STATUS_VIRTUAL:
- error_message = 'cannot remove an attribute in a new entry'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
- error_message = self.entry.entry_status + ' - cannot remove attributes'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- self._update_changes((MODIFY_REPLACE, []), True)
-
- def discard(self):
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'discarding <%s> attribute in <%s> entry', self.key, self.entry.entry_dn)
- del self.entry._changes[self.key]
- if not self.entry._changes:
- self.entry._state.set_status(self.entry._state._initial_status)
-
- @property
- def virtual(self):
- return False if len(self.values) else True
-
- @property
- def changes(self):
- if self.key in self.entry._changes:
- return self.entry._changes[self.key]
- return None
+"""
+"""
+
+# Created on 2014.01.06
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from os import linesep
+
+from .. import MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, SEQUENCE_TYPES
+from ..core.exceptions import LDAPCursorError
+from ..utils.repr import to_stdout_encoding
+from . import STATUS_PENDING_CHANGES, STATUS_VIRTUAL, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING
+from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
+
+
+# noinspection PyUnresolvedReferences
+class Attribute(object):
+ """Attribute/values object, it includes the search result (after post_query transformation) of each attribute in an entry
+
+ Attribute object is read only
+
+ - values: contain the processed attribute values
+ - raw_values': contain the unprocessed attribute values
+
+
+ """
+
+ def __init__(self, attr_def, entry, cursor):
+ self.key = attr_def.key
+ self.definition = attr_def
+ self.values = []
+ self.raw_values = []
+ self.response = None
+ self.entry = entry
+ self.cursor = cursor
+ other_names = [name for name in attr_def.oid_info.name if self.key.lower() != name.lower()] if attr_def.oid_info else None
+ self.other_names = set(other_names) if other_names else None # self.other_names is None if there are no short names, else is a set of secondary names
+
+ def __repr__(self):
+ if len(self.values) == 1:
+ r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
+ elif len(self.values) > 1:
+ r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
+ filler = ' ' * (len(self.key) + 6)
+ for value in self.values[1:]:
+ r += linesep + filler + to_stdout_encoding(value)
+ else:
+ r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding('<no value>')
+
+ return r
+
+ def __str__(self):
+ if len(self.values) == 1:
+ return to_stdout_encoding(self.values[0])
+ else:
+ return to_stdout_encoding(self.values)
+
+ def __len__(self):
+ return len(self.values)
+
+ def __iter__(self):
+ return self.values.__iter__()
+
+ def __getitem__(self, item):
+ return self.values[item]
+
+ def __getstate__(self):
+ cpy = dict(self.__dict__)
+ cpy['cursor'] = None
+ return cpy
+
+ def __eq__(self, other):
+ try:
+ if self.value == other:
+ return True
+ except Exception:
+ return False
+
+ def __ne__(self, other):
+ return not self == other
+
+ @property
+ def value(self):
+ """
+ :return: The single value or a list of values of the attribute.
+ """
+ if not self.values:
+ return None
+
+ return self.values[0] if len(self.values) == 1 else self.values
+
+
+class OperationalAttribute(Attribute):
+ """Operational attribute/values object. Include the search result of an
+ operational attribute in an entry
+
+ OperationalAttribute object is read only
+
+ - values: contains the processed attribute values
+ - raw_values: contains the unprocessed attribute values
+
+ It may not have an AttrDef
+
+ """
+
+ def __repr__(self):
+ if len(self.values) == 1:
+ r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0])
+ elif len(self.values) > 1:
+ r = to_stdout_encoding(self.key) + ' [OPERATIONAL]: ' + to_stdout_encoding(self.values[0])
+ filler = ' ' * (len(self.key) + 6)
+ for value in sorted(self.values[1:]):
+ r += linesep + filler + to_stdout_encoding(value)
+ else:
+ r = ''
+
+ return r
+
+
+class WritableAttribute(Attribute):
+ def __repr__(self):
+ filler = ' ' * (len(self.key) + 6)
+ if len(self.values) == 1:
+ r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
+ elif len(self.values) > 1:
+ r = to_stdout_encoding(self.key) + ': ' + to_stdout_encoding(self.values[0])
+ for value in self.values[1:]:
+ r += linesep + filler + to_stdout_encoding(value)
+ else:
+ r = to_stdout_encoding(self.key) + to_stdout_encoding(': <Virtual>')
+ if self.definition.name in self.entry._changes:
+ r += linesep + filler + 'CHANGES: ' + str(self.entry._changes[self.definition.name])
+ return r
+
+ def __iadd__(self, other):
+ self.add(other)
+ return Ellipsis # hack to avoid calling set() in entry __setattr__
+
+ def __isub__(self, other):
+ self.delete(other)
+ return Ellipsis # hack to avoid calling set_value in entry __setattr__
+
+ def _update_changes(self, changes, remove_old=False):
+ # checks for friendly key in AttrDef and uses the real attribute name
+ if self.definition and self.definition.name:
+ key = self.definition.name
+ else:
+ key = self.key
+
+ if key not in self.entry._changes or remove_old: # remove old changes (for removing attribute)
+ self.entry._changes[key] = []
+
+ self.entry._changes[key].append(changes)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'updated changes <%r> for <%s> attribute in <%s> entry', changes, self.key, self.entry.entry_dn)
+ self.entry._state.set_status(STATUS_PENDING_CHANGES)
+
+ def add(self, values):
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'adding %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
+ # new value for attribute to commit with a MODIFY_ADD
+ if self.entry._state._initial_status == STATUS_VIRTUAL:
+ error_message = 'cannot perform a modify operation in a new entry'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
+ error_message = self.entry.entry_status + ' - cannot add attributes'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ if values is None:
+ error_message = 'value to add cannot be None'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ if values is not None:
+ validated = self.definition.validate(values) # returns True, False or a value to substitute to the actual values
+ if validated is False:
+ error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key)
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ elif validated is not True: # a valid LDAP value equivalent to the actual values
+ values = validated
+ self._update_changes((MODIFY_ADD, values if isinstance(values, SEQUENCE_TYPES) else [values]))
+
+ def set(self, values):
+ # new value for attribute to commit with a MODIFY_REPLACE, old values are deleted
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'setting %r to <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
+ if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
+ error_message = self.entry.entry_status + ' - cannot set attributes'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ if values is None:
+ error_message = 'new value cannot be None'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ validated = self.definition.validate(values) # returns True, False or a value to substitute to the actual values
+ if validated is False:
+ error_message = 'value \'%s\' non valid for attribute \'%s\'' % (values, self.key)
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ elif validated is not True: # a valid LDAP value equivalent to the actual values
+ values = validated
+ self._update_changes((MODIFY_REPLACE, values if isinstance(values, SEQUENCE_TYPES) else [values]), remove_old=True)
+
+ def delete(self, values):
+ # value for attribute to delete in commit with a MODIFY_DELETE
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'deleting %r from <%s> attribute in <%s> entry', values, self.key, self.entry.entry_dn)
+ if self.entry._state._initial_status == STATUS_VIRTUAL:
+ error_message = 'cannot delete an attribute value in a new entry'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
+ error_message = self.entry.entry_status + ' - cannot delete attributes'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ if values is None:
+ error_message = 'value to delete cannot be None'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ if not isinstance(values, SEQUENCE_TYPES):
+ values = [values]
+ for single_value in values:
+ if single_value not in self.values:
+ error_message = 'value \'%s\' not present in \'%s\'' % (single_value, ', '.join(self.values))
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ self._update_changes((MODIFY_DELETE, values))
+
+ def remove(self):
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'removing <%s> attribute in <%s> entry', self.key, self.entry.entry_dn)
+ if self.entry._state._initial_status == STATUS_VIRTUAL:
+ error_message = 'cannot remove an attribute in a new entry'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ if self.entry.entry_status in [STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING]:
+ error_message = self.entry.entry_status + ' - cannot remove attributes'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ self._update_changes((MODIFY_REPLACE, []), True)
+
+ def discard(self):
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'discarding <%s> attribute in <%s> entry', self.key, self.entry.entry_dn)
+ del self.entry._changes[self.key]
+ if not self.entry._changes:
+ self.entry._state.set_status(self.entry._state._initial_status)
+
+ @property
+ def virtual(self):
+ return False if len(self.values) else True
+
+ @property
+ def changes(self):
+ if self.key in self.entry._changes:
+ return self.entry._changes[self.key]
+ return None
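Apart from the copyright bump, two behavioural changes sit in the re-added block above: Attribute gains a __getstate__ hook that returns a copy of the instance dictionary with the cursor reference blanked out, and set() now calls _update_changes(..., remove_old=True), so a MODIFY_REPLACE discards any changes previously queued for the same attribute. A short sketch of what the new hook guarantees, assuming a reachable server; host, credentials and base DN are placeholders:

    from ldap3 import Server, Connection, ObjectDef, Reader

    server = Server('ldap://ldap.example.com')
    conn = Connection(server, user='cn=admin,o=example', password='secret', auto_bind=True)

    person = ObjectDef('inetOrgPerson', conn)   # attribute definitions read from the server schema
    reader = Reader(conn, person, 'o=example')
    reader.search()

    attr = reader.entries[0].cn                 # an Attribute populated by the search
    state = attr.__getstate__()                 # same keys as attr.__dict__ ...
    assert state['cursor'] is None              # ... but without the live cursor/connection reference
    assert attr.cursor is reader                # the attribute itself is left untouched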
diff --git a/ldap3/abstract/cursor.py b/ldap3/abstract/cursor.py
index 0710642..9259a2c 100644
--- a/ldap3/abstract/cursor.py
+++ b/ldap3/abstract/cursor.py
@@ -1,894 +1,906 @@
-"""
-"""
-
-# Created on 2014.01.06
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-from collections import namedtuple
-from copy import deepcopy
-from datetime import datetime
-from os import linesep
-from time import sleep
-
-from . import STATUS_VIRTUAL, STATUS_READ, STATUS_WRITABLE
-from .. import SUBTREE, LEVEL, DEREF_ALWAYS, DEREF_NEVER, BASE, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
-from ..abstract import STATUS_PENDING_CHANGES
-from .attribute import Attribute, OperationalAttribute, WritableAttribute
-from .attrDef import AttrDef
-from .objectDef import ObjectDef
-from .entry import Entry, WritableEntry
-from ..core.exceptions import LDAPCursorError, LDAPObjectDereferenceError
-from ..core.results import RESULT_SUCCESS
-from ..utils.ciDict import CaseInsensitiveWithAliasDict
-from ..utils.dn import safe_dn, safe_rdn
-from ..utils.conv import to_raw
-from ..utils.config import get_config_parameter
-from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
-from ..protocol.oid import ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION
-
-Operation = namedtuple('Operation', ('request', 'result', 'response'))
-
-
-def _ret_search_value(value):
- return value[0] + '=' + value[1:] if value[0] in '<>~' and value[1] != '=' else value
-
-
-def _create_query_dict(query_text):
- """
- Create a dictionary with query key:value definitions
- query_text is a comma delimited key:value sequence
- """
- query_dict = dict()
- if query_text:
- for arg_value_str in query_text.split(','):
- if ':' in arg_value_str:
- arg_value_list = arg_value_str.split(':')
- query_dict[arg_value_list[0].strip()] = arg_value_list[1].strip()
-
- return query_dict
-
-
-class Cursor(object):
- # entry_class and attribute_class define the type of entry and attribute used by the cursor
- # entry_initial_status defines the initial status of a entry
- # entry_class = Entry, must be defined in subclasses
- # attribute_class = Attribute, must be defined in subclasses
- # entry_initial_status = STATUS, must be defined in subclasses
-
- def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None):
- conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
- self.connection = connection
-
- if connection._deferred_bind or connection._deferred_open: # probably a lazy connection, tries to bind
- connection._fire_deferred()
-
- if isinstance(object_def, STRING_TYPES):
- object_def = ObjectDef(object_def, connection.server.schema)
- self.definition = object_def
- if attributes: # checks if requested attributes are defined in ObjectDef
- not_defined_attributes = []
- if isinstance(attributes, STRING_TYPES):
- attributes = [attributes]
-
- for attribute in attributes:
- if attribute not in self.definition._attributes and attribute.lower() not in conf_attributes_excluded_from_object_def:
- not_defined_attributes.append(attribute)
-
- if not_defined_attributes:
- error_message = 'Attributes \'%s\' non in definition' % ', '.join(not_defined_attributes)
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
-
- self.attributes = set(attributes) if attributes else set([attr.name for attr in self.definition])
- self.get_operational_attributes = get_operational_attributes
- self.controls = controls
- self.execution_time = None
- self.entries = []
- self.schema = self.connection.server.schema
- self._do_not_reset = False # used for refreshing entry in entry_refresh() without removing all entries from the Cursor
- self._operation_history = list() # a list storing all the requests, results and responses for the last cursor operation
-
- def __repr__(self):
- r = 'CURSOR : ' + self.__class__.__name__ + linesep
- r += 'CONN : ' + str(self.connection) + linesep
- r += 'DEFS : ' + repr(self.definition._object_class) + ' ['
- for attr_def in sorted(self.definition):
- r += (attr_def.key if attr_def.key == attr_def.name else (attr_def.key + ' <' + attr_def.name + '>')) + ', '
- if r[-2] == ',':
- r = r[:-2]
- r += ']' + linesep
- r += 'ATTRS : ' + repr(sorted(self.attributes)) + (' [OPERATIONAL]' if self.get_operational_attributes else '') + linesep
- if isinstance(self, Reader):
- r += 'BASE : ' + repr(self.base) + (' [SUB]' if self.sub_tree else ' [LEVEL]') + linesep
- if self._query:
- r += 'QUERY : ' + repr(self._query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep
- if self.validated_query:
- r += 'PARSED : ' + repr(self.validated_query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep
- if self.query_filter:
- r += 'FILTER : ' + repr(self.query_filter) + linesep
-
- if self.execution_time:
- r += 'ENTRIES: ' + str(len(self.entries))
- r += ' [executed at: ' + str(self.execution_time.isoformat()) + ']' + linesep
-
- if self.failed:
- r += 'LAST OPERATION FAILED [' + str(len(self.errors)) + ' failure' + ('s' if len(self.errors) > 1 else '') + ' at operation' + ('s ' if len(self.errors) > 1 else ' ') + ', '.join([str(i) for i, error in enumerate(self.operations) if error.result['result'] != RESULT_SUCCESS]) + ']'
-
- return r
-
- def __str__(self):
- return self.__repr__()
-
- def __iter__(self):
- return self.entries.__iter__()
-
- def __getitem__(self, item):
- """Return indexed item, if index is not found then try to sequentially search in DN of entries.
- If only one entry is found return it else raise a KeyError exception. The exception message
- includes the number of entries that matches, if less than 10 entries match then show the DNs
- in the exception message.
- """
- try:
- return self.entries[item]
- except TypeError:
- pass
-
- if isinstance(item, STRING_TYPES):
- found = self.match_dn(item)
-
- if len(found) == 1:
- return found[0]
- elif len(found) > 1:
- error_message = 'Multiple entries found: %d entries match the text in dn' % len(found) + ('' if len(found) > 10 else (' [' + '; '.join([e.entry_dn for e in found]) + ']'))
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise KeyError(error_message)
-
- error_message = 'no entry found'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise KeyError(error_message)
-
- def __len__(self):
- return len(self.entries)
-
- if str is not bytes: # Python 3
- def __bool__(self): # needed to make the cursor appears as existing in "if cursor:" even if there are no entries
- return True
- else: # Python 2
- def __nonzero__(self):
- return True
-
- def _get_attributes(self, response, attr_defs, entry):
- """Assign the result of the LDAP query to the Entry object dictionary.
-
- If the optional 'post_query' callable is present in the AttrDef it is called with each value of the attribute and the callable result is stored in the attribute.
-
- Returns the default value for missing attributes.
- If the 'dereference_dn' in AttrDef is a ObjectDef then the attribute values are treated as distinguished name and the relevant entry is retrieved and stored in the attribute value.
-
- """
- conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
- conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
- attributes = CaseInsensitiveWithAliasDict()
- used_attribute_names = set()
- for attr in attr_defs:
- attr_def = attr_defs[attr]
- attribute_name = None
- for attr_name in response['attributes']:
- if attr_def.name.lower() == attr_name.lower():
- attribute_name = attr_name
- break
-
- if attribute_name or attr_def.default is not NotImplemented: # attribute value found in result or default value present - NotImplemented allows use of None as default
- attribute = self.attribute_class(attr_def, entry, self)
- attribute.response = response
- attribute.raw_values = response['raw_attributes'][attribute_name] if attribute_name else None
- if attr_def.post_query and attr_def.name in response['attributes'] and response['raw_attributes'] != list():
- attribute.values = attr_def.post_query(attr_def.key, response['attributes'][attribute_name])
- else:
- if attr_def.default is NotImplemented or (attribute_name and response['raw_attributes'][attribute_name] != list()):
- attribute.values = response['attributes'][attribute_name]
- else:
- attribute.values = attr_def.default if isinstance(attr_def.default, SEQUENCE_TYPES) else [attr_def.default]
- if not isinstance(attribute.values, list): # force attribute values to list (if attribute is single-valued)
- attribute.values = [attribute.values]
- if attr_def.dereference_dn: # try to get object referenced in value
- if attribute.values:
- temp_reader = Reader(self.connection, attr_def.dereference_dn, base='', get_operational_attributes=self.get_operational_attributes, controls=self.controls)
- temp_values = []
- for element in attribute.values:
- if entry.entry_dn != element:
- temp_values.append(temp_reader.search_object(element))
- else:
- error_message = 'object %s is referencing itself in the \'%s\' attribute' % (entry.entry_dn, attribute.definition.name)
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPObjectDereferenceError(error_message)
- del temp_reader # remove the temporary Reader
- attribute.values = temp_values
- attributes[attribute.key] = attribute
- if attribute.other_names:
- attributes.set_alias(attribute.key, attribute.other_names)
- if attr_def.other_names:
- attributes.set_alias(attribute.key, attr_def.other_names)
- used_attribute_names.add(attribute_name)
-
- if self.attributes:
- used_attribute_names.update(self.attributes)
-
- for attribute_name in response['attributes']:
- if attribute_name not in used_attribute_names:
- operational_attribute = False
- # check if the type is an operational attribute
- if attribute_name in self.schema.attribute_types:
- if self.schema.attribute_types[attribute_name].no_user_modification or self.schema.attribute_types[attribute_name].usage in [ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION]:
- operational_attribute = True
- else:
- operational_attribute = True
- if not operational_attribute and attribute_name not in attr_defs and attribute_name.lower() not in conf_attributes_excluded_from_object_def:
- error_message = 'attribute \'%s\' not in object class \'%s\' for entry %s' % (attribute_name, ', '.join(entry.entry_definition._object_class), entry.entry_dn)
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- attribute = OperationalAttribute(AttrDef(conf_operational_attribute_prefix + attribute_name), entry, self)
- attribute.raw_values = response['raw_attributes'][attribute_name]
- attribute.values = response['attributes'][attribute_name] if isinstance(response['attributes'][attribute_name], SEQUENCE_TYPES) else [response['attributes'][attribute_name]]
- if (conf_operational_attribute_prefix + attribute_name) not in attributes:
- attributes[conf_operational_attribute_prefix + attribute_name] = attribute
-
- return attributes
-
- def match_dn(self, dn):
- """Return entries with text in DN"""
- matched = []
- for entry in self.entries:
- if dn.lower() in entry.entry_dn.lower():
- matched.append(entry)
- return matched
-
- def match(self, attributes, value):
- """Return entries with text in one of the specified attributes"""
- matched = []
- if not isinstance(attributes, SEQUENCE_TYPES):
- attributes = [attributes]
-
- for entry in self.entries:
- found = False
- for attribute in attributes:
- if attribute in entry:
- for attr_value in entry[attribute].values:
- if hasattr(attr_value, 'lower') and hasattr(value, 'lower') and value.lower() in attr_value.lower():
- found = True
- elif value == attr_value:
- found = True
- if found:
- matched.append(entry)
- break
- if found:
- break
- # checks raw values, tries to convert value to byte
- raw_value = to_raw(value)
- if isinstance(raw_value, (bytes, bytearray)):
- for attr_value in entry[attribute].raw_values:
- if hasattr(attr_value, 'lower') and hasattr(raw_value, 'lower') and raw_value.lower() in attr_value.lower():
- found = True
- elif raw_value == attr_value:
- found = True
- if found:
- matched.append(entry)
- break
- if found:
- break
- return matched
-
- def _create_entry(self, response):
- if not response['type'] == 'searchResEntry':
- return None
-
- entry = self.entry_class(response['dn'], self) # define an Entry (writable or readonly), as specified in the cursor definition
- entry._state.attributes = self._get_attributes(response, self.definition._attributes, entry)
- entry._state.entry_raw_attributes = deepcopy(response['raw_attributes'])
-
- entry._state.response = response
- entry._state.read_time = datetime.now()
- entry._state.set_status(self.entry_initial_status)
- for attr in entry: # returns the whole attribute object
- entry.__dict__[attr.key] = attr
-
- return entry
-
- def _execute_query(self, query_scope, attributes):
- if not self.connection:
- error_message = 'no connection established'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- old_query_filter = None
- if query_scope == BASE: # requesting a single object so an always-valid filter is set
- if hasattr(self, 'query_filter'): # only Reader has a query filter
- old_query_filter = self.query_filter
- self.query_filter = '(objectclass=*)'
- else:
- self._create_query_filter()
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'executing query - base: %s - filter: %s - scope: %s for <%s>', self.base, self.query_filter, query_scope, self)
- with self.connection:
- result = self.connection.search(search_base=self.base,
- search_filter=self.query_filter,
- search_scope=query_scope,
- dereference_aliases=self.dereference_aliases,
- attributes=attributes if attributes else list(self.attributes),
- get_operational_attributes=self.get_operational_attributes,
- controls=self.controls)
- if not self.connection.strategy.sync:
- response, result, request = self.connection.get_response(result, get_request=True)
- else:
- response = self.connection.response
- result = self.connection.result
- request = self.connection.request
-
- self._store_operation_in_history(request, result, response)
-
- if self._do_not_reset: # trick to not remove entries when using _refresh()
- return self._create_entry(response[0])
-
- self.entries = []
- for r in response:
- entry = self._create_entry(r)
- if entry is not None:
- self.entries.append(entry)
-
- self.execution_time = datetime.now()
-
- if old_query_filter: # requesting a single object so an always-valid filter is set
- self.query_filter = old_query_filter
-
- def remove(self, entry):
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'removing entry <%s> in <%s>', entry, self)
- self.entries.remove(entry)
-
- def _reset_history(self):
- self._operation_history = list()
-
- def _store_operation_in_history(self, request, result, response):
- self._operation_history.append(Operation(request, result, response))
-
- @property
- def operations(self):
- return self._operation_history
-
- @property
- def errors(self):
- return [error for error in self._operation_history if error.result['result'] != RESULT_SUCCESS]
-
- @property
- def failed(self):
- return any([error.result['result'] != RESULT_SUCCESS for error in self._operation_history])
-
-
-class Reader(Cursor):
- """Reader object to perform searches:
-
- :param connection: the LDAP connection object to use
- :type connection: LDAPConnection
- :param object_def: the ObjectDef of the LDAP object returned
- :type object_def: ObjectDef
- :param query: the simplified query (will be transformed in an LDAP filter)
- :type query: str
- :param base: starting base of the search
- :type base: str
- :param components_in_and: specify if assertions in the query must all be satisfied or not (AND/OR)
- :type components_in_and: bool
- :param sub_tree: specify if the search must be performed ad Single Level (False) or Whole SubTree (True)
- :type sub_tree: bool
- :param get_operational_attributes: specify if operational attributes are returned or not
- :type get_operational_attributes: bool
- :param controls: controls to be used in search
- :type controls: tuple
-
- """
- entry_class = Entry # entries are read_only
- attribute_class = Attribute # attributes are read_only
- entry_initial_status = STATUS_READ
-
- def __init__(self, connection, object_def, base, query='', components_in_and=True, sub_tree=True, get_operational_attributes=False, attributes=None, controls=None):
- Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls)
- self._components_in_and = components_in_and
- self.sub_tree = sub_tree
- self._query = query
- self.base = base
- self.dereference_aliases = DEREF_ALWAYS
- self.validated_query = None
- self._query_dict = dict()
- self._validated_query_dict = dict()
- self.query_filter = None
- self.reset()
-
- if log_enabled(BASIC):
- log(BASIC, 'instantiated Reader Cursor: <%r>', self)
-
- @property
- def query(self):
- return self._query
-
- @query.setter
- def query(self, value):
- self._query = value
- self.reset()
-
- @property
- def components_in_and(self):
- return self._components_in_and
-
- @components_in_and.setter
- def components_in_and(self, value):
- self._components_in_and = value
- self.reset()
-
- def clear(self):
- """Clear the Reader search parameters
-
- """
- self.dereference_aliases = DEREF_ALWAYS
- self._reset_history()
-
- def reset(self):
- """Clear all the Reader parameters
-
- """
- self.clear()
- self.validated_query = None
- self._query_dict = dict()
- self._validated_query_dict = dict()
- self.execution_time = None
- self.query_filter = None
- self.entries = []
- self._create_query_filter()
-
- def _validate_query(self):
- """Processes the text query and verifies that the requested friendly names are in the Reader dictionary
- If the AttrDef has a 'validate' property the callable is executed and if it returns False an Exception is raised
-
- """
- if not self._query_dict:
- self._query_dict = _create_query_dict(self._query)
-
- query = ''
- for d in sorted(self._query_dict):
- attr = d[1:] if d[0] in '&|' else d
- for attr_def in self.definition:
- if ''.join(attr.split()).lower() == attr_def.key.lower():
- attr = attr_def.key
- break
- if attr in self.definition:
- vals = sorted(self._query_dict[d].split(';'))
-
- query += (d[0] + attr if d[0] in '&|' else attr) + ': '
- for val in vals:
- val = val.strip()
- val_not = True if val[0] == '!' else False
- val_search_operator = '=' # default
- if val_not:
- if val[1:].lstrip()[0] not in '=<>~':
- value = val[1:].lstrip()
- else:
- val_search_operator = val[1:].lstrip()[0]
- value = val[1:].lstrip()[1:]
- else:
- if val[0] not in '=<>~':
- value = val.lstrip()
- else:
- val_search_operator = val[0]
- value = val[1:].lstrip()
-
- if self.definition[attr].validate:
- validated = self.definition[attr].validate(value) # returns True, False or a value to substitute to the actual values
- if validated is False:
- error_message = 'validation failed for attribute %s and value %s' % (d, val)
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- elif validated is not True: # a valid LDAP value equivalent to the actual values
- value = validated
- if val_not:
- query += '!' + val_search_operator + str(value)
- else:
- query += val_search_operator + str(value)
-
- query += ';'
- query = query[:-1] + ', '
- else:
- error_message = 'attribute \'%s\' not in definition' % attr
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- self.validated_query = query[:-2]
- self._validated_query_dict = _create_query_dict(self.validated_query)
-
- def _create_query_filter(self):
- """Converts the query dictionary to the filter text"""
- self.query_filter = ''
-
- if self.definition._object_class:
- self.query_filter += '(&'
- if isinstance(self.definition._object_class, SEQUENCE_TYPES) and len(self.definition._object_class) == 1:
- self.query_filter += '(objectClass=' + self.definition._object_class[0] + ')'
- elif isinstance(self.definition._object_class, SEQUENCE_TYPES):
- self.query_filter += '(&'
- for object_class in self.definition._object_class:
- self.query_filter += '(objectClass=' + object_class + ')'
- self.query_filter += ')'
- else:
- error_message = 'object class must be a string or a list'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
-
- if self._query and self._query.startswith('(') and self._query.endswith(')'): # query is already an LDAP filter
- if 'objectclass' not in self._query.lower():
- self.query_filter += self._query + ')' # if objectclass not in filter adds from definition
- else:
- self.query_filter = self._query
- return
- elif self._query: # if a simplified filter is present
- if not self.components_in_and:
- self.query_filter += '(|'
- elif not self.definition._object_class:
- self.query_filter += '(&'
-
- self._validate_query()
-
- attr_counter = 0
- for attr in sorted(self._validated_query_dict):
- attr_counter += 1
- multi = True if ';' in self._validated_query_dict[attr] else False
- vals = sorted(self._validated_query_dict[attr].split(';'))
- attr_def = self.definition[attr[1:]] if attr[0] in '&|' else self.definition[attr]
- if attr_def.pre_query:
- modvals = []
- for val in vals:
- modvals.append(val[0] + attr_def.pre_query(attr_def.key, val[1:]))
- vals = modvals
- if multi:
- if attr[0] in '&|':
- self.query_filter += '(' + attr[0]
- else:
- self.query_filter += '(|'
-
- for val in vals:
- if val[0] == '!':
- self.query_filter += '(!(' + attr_def.name + _ret_search_value(val[1:]) + '))'
- else:
- self.query_filter += '(' + attr_def.name + _ret_search_value(val) + ')'
- if multi:
- self.query_filter += ')'
-
- if not self.components_in_and:
- self.query_filter += '))'
- else:
- self.query_filter += ')'
-
- if not self.definition._object_class and attr_counter == 1: # remove unneeded starting filter
- self.query_filter = self.query_filter[2: -1]
-
- if self.query_filter == '(|)' or self.query_filter == '(&)': # remove empty filter
- self.query_filter = ''
- else: # no query, remove unneeded leading (&
- self.query_filter = self.query_filter[2:]
-
- def search(self, attributes=None):
- """Perform the LDAP search
-
- :return: Entries found in search
-
- """
- self.clear()
- query_scope = SUBTREE if self.sub_tree else LEVEL
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'performing search in <%s>', self)
- self._execute_query(query_scope, attributes)
-
- return self.entries
-
- def search_object(self, entry_dn=None, attributes=None): # base must be a single dn
- """Perform the LDAP search operation SINGLE_OBJECT scope
-
- :return: Entry found in search
-
- """
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'performing object search in <%s>', self)
- self.clear()
- if entry_dn:
- old_base = self.base
- self.base = entry_dn
- self._execute_query(BASE, attributes)
- self.base = old_base
- else:
- self._execute_query(BASE, attributes)
-
- return self.entries[0] if len(self.entries) > 0 else None
-
- def search_level(self, attributes=None):
- """Perform the LDAP search operation with SINGLE_LEVEL scope
-
- :return: Entries found in search
-
- """
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'performing single level search in <%s>', self)
- self.clear()
- self._execute_query(LEVEL, attributes)
-
- return self.entries
-
- def search_subtree(self, attributes=None):
- """Perform the LDAP search operation WHOLE_SUBTREE scope
-
- :return: Entries found in search
-
- """
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'performing whole subtree search in <%s>', self)
- self.clear()
- self._execute_query(SUBTREE, attributes)
-
- return self.entries
-
- def _entries_generator(self, responses):
- for response in responses:
- yield self._create_entry(response)
-
- def search_paged(self, paged_size, paged_criticality=True, generator=True, attributes=None):
- """Perform a paged search, can be called as an Iterator
-
- :param attributes: optional attributes to search
- :param paged_size: number of entries returned in each search
- :type paged_size: int
- :param paged_criticality: specify if server must not execute the search if it is not capable of paging searches
- :type paged_criticality: bool
- :param generator: if True the paged searches are executed while generating the entries,
- if False all the paged searches are execute before returning the generator
- :type generator: bool
- :return: Entries found in search
-
- """
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'performing paged search in <%s> with paged size %s', self, str(paged_size))
- if not self.connection:
- error_message = 'no connection established'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
-
- self.clear()
- self._create_query_filter()
- self.entries = []
- self.execution_time = datetime.now()
- response = self.connection.extend.standard.paged_search(search_base=self.base,
- search_filter=self.query_filter,
- search_scope=SUBTREE if self.sub_tree else LEVEL,
- dereference_aliases=self.dereference_aliases,
- attributes=attributes if attributes else self.attributes,
- get_operational_attributes=self.get_operational_attributes,
- controls=self.controls,
- paged_size=paged_size,
- paged_criticality=paged_criticality,
- generator=generator)
- if generator:
- return self._entries_generator(response)
- else:
- return list(self._entries_generator(response))
-
-
-class Writer(Cursor):
- entry_class = WritableEntry
- attribute_class = WritableAttribute
- entry_initial_status = STATUS_WRITABLE
-
- @staticmethod
- def from_cursor(cursor, connection=None, object_def=None, custom_validator=None):
- if connection is None:
- connection = cursor.connection
- if object_def is None:
- object_def = cursor.definition
- writer = Writer(connection, object_def, attributes=cursor.attributes)
- for entry in cursor.entries:
- if isinstance(cursor, Reader):
- entry.entry_writable(object_def, writer, custom_validator=custom_validator)
- elif isinstance(cursor, Writer):
- pass
- else:
- error_message = 'unknown cursor type %s' % str(type(cursor))
- if log_enabled(ERROR):
- log(ERROR, '%s', error_message)
- raise LDAPCursorError(error_message)
- writer.execution_time = cursor.execution_time
- if log_enabled(BASIC):
- log(BASIC, 'instantiated Writer Cursor <%r> from cursor <%r>', writer, cursor)
- return writer
-
- @staticmethod
- def from_response(connection, object_def, response=None):
- if response is None:
- if not connection.strategy.sync:
- error_message = 'with asynchronous strategies response must be specified'
- if log_enabled(ERROR):
- log(ERROR, '%s', error_message)
- raise LDAPCursorError(error_message)
- elif connection.response:
- response = connection.response
- else:
- error_message = 'response not present'
- if log_enabled(ERROR):
- log(ERROR, '%s', error_message)
- raise LDAPCursorError(error_message)
- writer = Writer(connection, object_def)
-
- for resp in response:
- if resp['type'] == 'searchResEntry':
- entry = writer._create_entry(resp)
- writer.entries.append(entry)
- if log_enabled(BASIC):
- log(BASIC, 'instantiated Writer Cursor <%r> from response', writer)
- return writer
-
- def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None):
- Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls)
- self.dereference_aliases = DEREF_NEVER
-
- if log_enabled(BASIC):
- log(BASIC, 'instantiated Writer Cursor: <%r>', self)
-
- def commit(self, refresh=True):
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'committed changes for <%s>', self)
- self._reset_history()
- successful = True
- for entry in self.entries:
- if not entry.entry_commit_changes(refresh=refresh, controls=self.controls, clear_history=False):
- successful = False
-
- self.execution_time = datetime.now()
-
- return successful
-
- def discard(self):
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'discarded changes for <%s>', self)
- for entry in self.entries:
- entry.entry_discard_changes()
-
- def _refresh_object(self, entry_dn, attributes=None, tries=4, seconds=2, controls=None): # base must be a single dn
- """Performs the LDAP search operation SINGLE_OBJECT scope
-
- :return: Entry found in search
-
- """
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'refreshing object <%s> for <%s>', entry_dn, self)
- if not self.connection:
- error_message = 'no connection established'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
-
- response = []
- with self.connection:
- counter = 0
- while counter < tries:
- result = self.connection.search(search_base=entry_dn,
- search_filter='(objectclass=*)',
- search_scope=BASE,
- dereference_aliases=DEREF_NEVER,
- attributes=attributes if attributes else self.attributes,
- get_operational_attributes=self.get_operational_attributes,
- controls=controls)
- if not self.connection.strategy.sync:
- response, result, request = self.connection.get_response(result, get_request=True)
- else:
- response = self.connection.response
- result = self.connection.result
- request = self.connection.request
-
- if result['result'] in [RESULT_SUCCESS]:
- break
- sleep(seconds)
- counter += 1
- self._store_operation_in_history(request, result, response)
-
- if len(response) == 1:
- return self._create_entry(response[0])
- elif len(response) == 0:
- return None
-
- error_message = 'more than 1 entry returned for a single object search'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
-
- def new(self, dn):
- if log_enabled(BASIC):
- log(BASIC, 'creating new entry <%s> for <%s>', dn, self)
- dn = safe_dn(dn)
- for entry in self.entries: # checks if dn is already used in an cursor entry
- if entry.entry_dn == dn:
- error_message = 'dn already present in cursor'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- rdns = safe_rdn(dn, decompose=True)
- entry = self.entry_class(dn, self) # defines a new empty Entry
- for attr in entry.entry_mandatory_attributes: # defines all mandatory attributes as virtual
- entry._state.attributes[attr] = self.attribute_class(entry._state.definition[attr], entry, self)
- entry.__dict__[attr] = entry._state.attributes[attr]
- entry.objectclass.set(self.definition._object_class)
- for rdn in rdns: # adds virtual attributes from rdns in entry name (should be more than one with + syntax)
- if rdn[0] in entry._state.definition._attributes:
- rdn_name = entry._state.definition._attributes[rdn[0]].name # normalize case folding
- if rdn_name not in entry._state.attributes:
- entry._state.attributes[rdn_name] = self.attribute_class(entry._state.definition[rdn_name], entry, self)
- entry.__dict__[rdn_name] = entry._state.attributes[rdn_name]
- entry.__dict__[rdn_name].set(rdn[1])
- else:
- error_message = 'rdn type \'%s\' not in object class definition' % rdn[0]
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- entry._state.set_status(STATUS_VIRTUAL) # set intial status
- entry._state.set_status(STATUS_PENDING_CHANGES) # tries to change status to PENDING_CHANGES. If mandatory attributes are missing status is reverted to MANDATORY_MISSING
- self.entries.append(entry)
- return entry
-
- def refresh_entry(self, entry, tries=4, seconds=2):
- conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
-
- self._do_not_reset = True
- attr_list = []
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'refreshing entry <%s> for <%s>', entry, self)
- for attr in entry._state.attributes: # check friendly attribute name in AttrDef, do not check operational attributes
- if attr.lower().startswith(conf_operational_attribute_prefix.lower()):
- continue
- if entry._state.definition[attr].name:
- attr_list.append(entry._state.definition[attr].name)
- else:
- attr_list.append(entry._state.definition[attr].key)
-
- temp_entry = self._refresh_object(entry.entry_dn, attr_list, tries, seconds=seconds) # if any attributes is added adds only to the entry not to the definition
- self._do_not_reset = False
- if temp_entry:
- temp_entry._state.origin = entry._state.origin
- entry.__dict__.clear()
- entry.__dict__['_state'] = temp_entry._state
- for attr in entry._state.attributes: # returns the attribute key
- entry.__dict__[attr] = entry._state.attributes[attr]
-
- for attr in entry.entry_attributes: # if any attribute of the class was deleted make it virtual
- if attr not in entry._state.attributes and attr in entry.entry_definition._attributes:
- entry._state.attributes[attr] = WritableAttribute(entry.entry_definition[attr], entry, self)
- entry.__dict__[attr] = entry._state.attributes[attr]
- entry._state.set_status(entry._state._initial_status)
- return True
- return False
+"""
+"""
+
+# Created on 2014.01.06
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+from collections import namedtuple
+from copy import deepcopy
+from datetime import datetime
+from os import linesep
+from time import sleep
+
+from . import STATUS_VIRTUAL, STATUS_READ, STATUS_WRITABLE
+from .. import SUBTREE, LEVEL, DEREF_ALWAYS, DEREF_NEVER, BASE, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
+from ..abstract import STATUS_PENDING_CHANGES
+from .attribute import Attribute, OperationalAttribute, WritableAttribute
+from .attrDef import AttrDef
+from .objectDef import ObjectDef
+from .entry import Entry, WritableEntry
+from ..core.exceptions import LDAPCursorError, LDAPObjectDereferenceError
+from ..core.results import RESULT_SUCCESS
+from ..utils.ciDict import CaseInsensitiveWithAliasDict
+from ..utils.dn import safe_dn, safe_rdn
+from ..utils.conv import to_raw
+from ..utils.config import get_config_parameter
+from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
+from ..protocol.oid import ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION, CLASS_AUXILIARY
+
+Operation = namedtuple('Operation', ('request', 'result', 'response'))
+
+
+def _ret_search_value(value):
+ return value[0] + '=' + value[1:] if value[0] in '<>~' and value[1] != '=' else value
+
+
+def _create_query_dict(query_text):
+ """
+ Create a dictionary with query key:value definitions
+ query_text is a comma delimited key:value sequence
+ """
+ query_dict = dict()
+ if query_text:
+ for arg_value_str in query_text.split(','):
+ if ':' in arg_value_str:
+ arg_value_list = arg_value_str.split(':')
+ query_dict[arg_value_list[0].strip()] = arg_value_list[1].strip()
+
+ return query_dict
+
+
+class Cursor(object):
+ # entry_class and attribute_class define the type of entry and attribute used by the cursor
+ # entry_initial_status defines the initial status of an entry
+ # entry_class = Entry, must be defined in subclasses
+ # attribute_class = Attribute, must be defined in subclasses
+ # entry_initial_status = STATUS, must be defined in subclasses
+
+ def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
+ conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
+ self.connection = connection
+ self.get_operational_attributes = get_operational_attributes
+ if connection._deferred_bind or connection._deferred_open: # probably a lazy connection, tries to bind
+ connection._fire_deferred()
+
+ if isinstance(object_def, (STRING_TYPES, SEQUENCE_TYPES)):
+ if connection.closed: # try to open connection if closed to read schema
+ connection.bind()
+ object_def = ObjectDef(object_def, connection.server.schema, auxiliary_class=auxiliary_class)
+ self.definition = object_def
+ if attributes: # checks if requested attributes are defined in ObjectDef
+ not_defined_attributes = []
+ if isinstance(attributes, STRING_TYPES):
+ attributes = [attributes]
+
+ for attribute in attributes:
+ if attribute not in self.definition._attributes and attribute.lower() not in conf_attributes_excluded_from_object_def:
+ not_defined_attributes.append(attribute)
+
+ if not_defined_attributes:
+ error_message = 'Attributes \'%s\' not in definition' % ', '.join(not_defined_attributes)
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+
+ self.attributes = set(attributes) if attributes else set([attr.name for attr in self.definition])
+ self.controls = controls
+ self.execution_time = None
+ self.entries = []
+ self.schema = self.connection.server.schema
+ self._do_not_reset = False # used for refreshing entry in entry_refresh() without removing all entries from the Cursor
+ self._operation_history = list() # a list storing all the requests, results and responses for the last cursor operation
+
+ def __repr__(self):
+ r = 'CURSOR : ' + self.__class__.__name__ + linesep
+ r += 'CONN : ' + str(self.connection) + linesep
+ r += 'DEFS : ' + ', '.join(self.definition._object_class)
+ if self.definition._auxiliary_class:
+ r += ' [AUX: ' + ', '.join(self.definition._auxiliary_class) + ']'
+ r += linesep
+ # for attr_def in sorted(self.definition):
+ # r += (attr_def.key if attr_def.key == attr_def.name else (attr_def.key + ' <' + attr_def.name + '>')) + ', '
+ # if r[-2] == ',':
+ # r = r[:-2]
+ # r += ']' + linesep
+ if hasattr(self, 'attributes'):
+ r += 'ATTRS : ' + repr(sorted(self.attributes)) + (' [OPERATIONAL]' if self.get_operational_attributes else '') + linesep
+ if isinstance(self, Reader):
+ if hasattr(self, 'base'):
+ r += 'BASE : ' + repr(self.base) + (' [SUB]' if self.sub_tree else ' [LEVEL]') + linesep
+ if hasattr(self, '_query') and self._query:
+ r += 'QUERY : ' + repr(self._query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep
+ if hasattr(self, 'validated_query') and self.validated_query:
+ r += 'PARSED : ' + repr(self.validated_query) + ('' if '(' in self._query else (' [AND]' if self.components_in_and else ' [OR]')) + linesep
+ if hasattr(self, 'query_filter') and self.query_filter:
+ r += 'FILTER : ' + repr(self.query_filter) + linesep
+
+ if hasattr(self, 'execution_time') and self.execution_time:
+ r += 'ENTRIES: ' + str(len(self.entries))
+ r += ' [executed at: ' + str(self.execution_time.isoformat()) + ']' + linesep
+
+ if self.failed:
+ r += 'LAST OPERATION FAILED [' + str(len(self.errors)) + ' failure' + ('s' if len(self.errors) > 1 else '') + ' at operation' + ('s ' if len(self.errors) > 1 else ' ') + ', '.join([str(i) for i, error in enumerate(self.operations) if error.result['result'] != RESULT_SUCCESS]) + ']'
+
+ return r
+
+ def __str__(self):
+ return self.__repr__()
+
+ def __iter__(self):
+ return self.entries.__iter__()
+
+ def __getitem__(self, item):
+ """Return indexed item, if index is not found then try to sequentially search in DN of entries.
+ If only one entry is found return it else raise a KeyError exception. The exception message
+ includes the number of entries that matches, if less than 10 entries match then show the DNs
+ in the exception message.
+ """
+ try:
+ return self.entries[item]
+ except TypeError:
+ pass
+
+ if isinstance(item, STRING_TYPES):
+ found = self.match_dn(item)
+
+ if len(found) == 1:
+ return found[0]
+ elif len(found) > 1:
+ error_message = 'Multiple entries found: %d entries match the text in dn' % len(found) + ('' if len(found) > 10 else (' [' + '; '.join([e.entry_dn for e in found]) + ']'))
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise KeyError(error_message)
+
+ error_message = 'no entry found'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise KeyError(error_message)
+
+ def __len__(self):
+ return len(self.entries)
+
+ if str is not bytes: # Python 3
+ def __bool__(self): # needed to make the cursor appear as existing in "if cursor:" even if there are no entries
+ return True
+ else: # Python 2
+ def __nonzero__(self):
+ return True
+
+ def _get_attributes(self, response, attr_defs, entry):
+ """Assign the result of the LDAP query to the Entry object dictionary.
+
+ If the optional 'post_query' callable is present in the AttrDef it is called with each value of the attribute and the callable result is stored in the attribute.
+
+ Returns the default value for missing attributes.
+ If the 'dereference_dn' in AttrDef is an ObjectDef then the attribute values are treated as distinguished names and the referenced entries are retrieved and stored in the attribute values.
+
+ """
+ conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
+ conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
+ attributes = CaseInsensitiveWithAliasDict()
+ used_attribute_names = set()
+ for attr in attr_defs:
+ attr_def = attr_defs[attr]
+ attribute_name = None
+ for attr_name in response['attributes']:
+ if attr_def.name.lower() == attr_name.lower():
+ attribute_name = attr_name
+ break
+
+ if attribute_name or attr_def.default is not NotImplemented: # attribute value found in result or default value present - NotImplemented allows use of None as default
+ attribute = self.attribute_class(attr_def, entry, self)
+ attribute.response = response
+ attribute.raw_values = response['raw_attributes'][attribute_name] if attribute_name else None
+ if attr_def.post_query and attr_def.name in response['attributes'] and response['raw_attributes'] != list():
+ attribute.values = attr_def.post_query(attr_def.key, response['attributes'][attribute_name])
+ else:
+ if attr_def.default is NotImplemented or (attribute_name and response['raw_attributes'][attribute_name] != list()):
+ attribute.values = response['attributes'][attribute_name]
+ else:
+ attribute.values = attr_def.default if isinstance(attr_def.default, SEQUENCE_TYPES) else [attr_def.default]
+ if not isinstance(attribute.values, list): # force attribute values to list (if attribute is single-valued)
+ attribute.values = [attribute.values]
+ if attr_def.dereference_dn: # try to get object referenced in value
+ if attribute.values:
+ temp_reader = Reader(self.connection, attr_def.dereference_dn, base='', get_operational_attributes=self.get_operational_attributes, controls=self.controls)
+ temp_values = []
+ for element in attribute.values:
+ if entry.entry_dn != element:
+ temp_values.append(temp_reader.search_object(element))
+ else:
+ error_message = 'object %s is referencing itself in the \'%s\' attribute' % (entry.entry_dn, attribute.definition.name)
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPObjectDereferenceError(error_message)
+ del temp_reader # remove the temporary Reader
+ attribute.values = temp_values
+ attributes[attribute.key] = attribute
+ if attribute.other_names:
+ attributes.set_alias(attribute.key, attribute.other_names)
+ if attr_def.other_names:
+ attributes.set_alias(attribute.key, attr_def.other_names)
+ used_attribute_names.add(attribute_name)
+
+ if self.attributes:
+ used_attribute_names.update(self.attributes)
+
+ for attribute_name in response['attributes']:
+ if attribute_name not in used_attribute_names:
+ operational_attribute = False
+ # check if the type is an operational attribute
+ if attribute_name in self.schema.attribute_types:
+ if self.schema.attribute_types[attribute_name].no_user_modification or self.schema.attribute_types[attribute_name].usage in [ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION]:
+ operational_attribute = True
+ else:
+ operational_attribute = True
+ if not operational_attribute and attribute_name not in attr_defs and attribute_name.lower() not in conf_attributes_excluded_from_object_def:
+ error_message = 'attribute \'%s\' not in object class \'%s\' for entry %s' % (attribute_name, ', '.join(entry.entry_definition._object_class), entry.entry_dn)
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ attribute = OperationalAttribute(AttrDef(conf_operational_attribute_prefix + attribute_name), entry, self)
+ attribute.raw_values = response['raw_attributes'][attribute_name]
+ attribute.values = response['attributes'][attribute_name] if isinstance(response['attributes'][attribute_name], SEQUENCE_TYPES) else [response['attributes'][attribute_name]]
+ if (conf_operational_attribute_prefix + attribute_name) not in attributes:
+ attributes[conf_operational_attribute_prefix + attribute_name] = attribute
+
+ return attributes
+
+ def match_dn(self, dn):
+ """Return entries with text in DN"""
+ matched = []
+ for entry in self.entries:
+ if dn.lower() in entry.entry_dn.lower():
+ matched.append(entry)
+ return matched
+
+ def match(self, attributes, value):
+ """Return entries with text in one of the specified attributes"""
+ matched = []
+ if not isinstance(attributes, SEQUENCE_TYPES):
+ attributes = [attributes]
+
+ for entry in self.entries:
+ found = False
+ for attribute in attributes:
+ if attribute in entry:
+ for attr_value in entry[attribute].values:
+ if hasattr(attr_value, 'lower') and hasattr(value, 'lower') and value.lower() in attr_value.lower():
+ found = True
+ elif value == attr_value:
+ found = True
+ if found:
+ matched.append(entry)
+ break
+ if found:
+ break
+ # checks raw values, tries to convert value to bytes
+ raw_value = to_raw(value)
+ if isinstance(raw_value, (bytes, bytearray)):
+ for attr_value in entry[attribute].raw_values:
+ if hasattr(attr_value, 'lower') and hasattr(raw_value, 'lower') and raw_value.lower() in attr_value.lower():
+ found = True
+ elif raw_value == attr_value:
+ found = True
+ if found:
+ matched.append(entry)
+ break
+ if found:
+ break
+ return matched
+
+ def _create_entry(self, response):
+ if not response['type'] == 'searchResEntry':
+ return None
+
+ entry = self.entry_class(response['dn'], self) # define an Entry (writable or readonly), as specified in the cursor definition
+ entry._state.attributes = self._get_attributes(response, self.definition._attributes, entry)
+ entry._state.raw_attributes = deepcopy(response['raw_attributes'])
+
+ entry._state.response = response
+ entry._state.read_time = datetime.now()
+ entry._state.set_status(self.entry_initial_status)
+ for attr in entry: # returns the whole attribute object
+ entry.__dict__[attr.key] = attr
+
+ return entry
+
+ def _execute_query(self, query_scope, attributes):
+ if not self.connection:
+ error_message = 'no connection established'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ old_query_filter = None
+ if query_scope == BASE: # requesting a single object so an always-valid filter is set
+ if hasattr(self, 'query_filter'): # only Reader has a query filter
+ old_query_filter = self.query_filter
+ self.query_filter = '(objectclass=*)'
+ else:
+ self._create_query_filter()
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'executing query - base: %s - filter: %s - scope: %s for <%s>', self.base, self.query_filter, query_scope, self)
+ with self.connection:
+ result = self.connection.search(search_base=self.base,
+ search_filter=self.query_filter,
+ search_scope=query_scope,
+ dereference_aliases=self.dereference_aliases,
+ attributes=attributes if attributes else list(self.attributes),
+ get_operational_attributes=self.get_operational_attributes,
+ controls=self.controls)
+ if not self.connection.strategy.sync:
+ response, result, request = self.connection.get_response(result, get_request=True)
+ else:
+ response = self.connection.response
+ result = self.connection.result
+ request = self.connection.request
+
+ self._store_operation_in_history(request, result, response)
+
+ if self._do_not_reset: # trick to not remove entries when using _refresh()
+ return self._create_entry(response[0])
+
+ self.entries = []
+ for r in response:
+ entry = self._create_entry(r)
+ if entry is not None:
+ self.entries.append(entry)
+ if 'objectClass' in entry:
+ for object_class in entry.objectClass:
+ if self.schema and self.schema.object_classes[object_class].kind == CLASS_AUXILIARY and object_class not in self.definition._auxiliary_class:
+ # add auxiliary class to object definition
+ self.definition._auxiliary_class.append(object_class)
+ self.definition._populate_attr_defs(object_class)
+ self.execution_time = datetime.now()
+
+ if old_query_filter: # restore the filter that was replaced for the single-object search
+ self.query_filter = old_query_filter
+
+ def remove(self, entry):
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'removing entry <%s> in <%s>', entry, self)
+ self.entries.remove(entry)
+
+ def _reset_history(self):
+ self._operation_history = list()
+
+ def _store_operation_in_history(self, request, result, response):
+ self._operation_history.append(Operation(request, result, response))
+
+ @property
+ def operations(self):
+ return self._operation_history
+
+ @property
+ def errors(self):
+ return [error for error in self._operation_history if error.result['result'] != RESULT_SUCCESS]
+
+ @property
+ def failed(self):
+ if hasattr(self, '_operation_history'):
+ return any([error.result['result'] != RESULT_SUCCESS for error in self._operation_history])
+
+
+class Reader(Cursor):
+ """Reader object to perform searches:
+
+ :param connection: the LDAP connection object to use
+ :type connection: LDAPConnection
+ :param object_def: the ObjectDef of the LDAP object returned
+ :type object_def: ObjectDef
+ :param query: the simplified query (will be transformed in an LDAP filter)
+ :type query: str
+ :param base: starting base of the search
+ :type base: str
+ :param components_in_and: specify if assertions in the query must all be satisfied or not (AND/OR)
+ :type components_in_and: bool
+ :param sub_tree: specify if the search must be performed at Single Level (False) or Whole SubTree (True)
+ :type sub_tree: bool
+ :param get_operational_attributes: specify if operational attributes are returned or not
+ :type get_operational_attributes: bool
+ :param controls: controls to be used in search
+ :type controls: tuple
+
+ """
+ entry_class = Entry # entries are read_only
+ attribute_class = Attribute # attributes are read_only
+ entry_initial_status = STATUS_READ
+
+ def __init__(self, connection, object_def, base, query='', components_in_and=True, sub_tree=True, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
+ Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class)
+ self._components_in_and = components_in_and
+ self.sub_tree = sub_tree
+ self._query = query
+ self.base = base
+ self.dereference_aliases = DEREF_ALWAYS
+ self.validated_query = None
+ self._query_dict = dict()
+ self._validated_query_dict = dict()
+ self.query_filter = None
+ self.reset()
+
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated Reader Cursor: <%r>', self)
+
+ @property
+ def query(self):
+ return self._query
+
+ @query.setter
+ def query(self, value):
+ self._query = value
+ self.reset()
+
+ @property
+ def components_in_and(self):
+ return self._components_in_and
+
+ @components_in_and.setter
+ def components_in_and(self, value):
+ self._components_in_and = value
+ self.reset()
+
+ def clear(self):
+ """Clear the Reader search parameters
+
+ """
+ self.dereference_aliases = DEREF_ALWAYS
+ self._reset_history()
+
+ def reset(self):
+ """Clear all the Reader parameters
+
+ """
+ self.clear()
+ self.validated_query = None
+ self._query_dict = dict()
+ self._validated_query_dict = dict()
+ self.execution_time = None
+ self.query_filter = None
+ self.entries = []
+ self._create_query_filter()
+
+ def _validate_query(self):
+ """Processes the text query and verifies that the requested friendly names are in the Reader dictionary
+ If the AttrDef has a 'validate' property the callable is executed and if it returns False an Exception is raised
+
+ """
+ if not self._query_dict:
+ self._query_dict = _create_query_dict(self._query)
+
+ query = ''
+ for d in sorted(self._query_dict):
+ attr = d[1:] if d[0] in '&|' else d
+ for attr_def in self.definition:
+ if ''.join(attr.split()).lower() == attr_def.key.lower():
+ attr = attr_def.key
+ break
+ if attr in self.definition:
+ vals = sorted(self._query_dict[d].split(';'))
+
+ query += (d[0] + attr if d[0] in '&|' else attr) + ': '
+ for val in vals:
+ val = val.strip()
+ val_not = True if val[0] == '!' else False
+ val_search_operator = '=' # default
+ if val_not:
+ if val[1:].lstrip()[0] not in '=<>~':
+ value = val[1:].lstrip()
+ else:
+ val_search_operator = val[1:].lstrip()[0]
+ value = val[1:].lstrip()[1:]
+ else:
+ if val[0] not in '=<>~':
+ value = val.lstrip()
+ else:
+ val_search_operator = val[0]
+ value = val[1:].lstrip()
+
+ if self.definition[attr].validate:
+ validated = self.definition[attr].validate(value) # returns True, False, or a value to substitute for the actual value
+ if validated is False:
+ error_message = 'validation failed for attribute %s and value %s' % (d, val)
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ elif validated is not True: # a valid LDAP value equivalent to the actual values
+ value = validated
+ if val_not:
+ query += '!' + val_search_operator + str(value)
+ else:
+ query += val_search_operator + str(value)
+
+ query += ';'
+ query = query[:-1] + ', '
+ else:
+ error_message = 'attribute \'%s\' not in definition' % attr
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ self.validated_query = query[:-2]
+ self._validated_query_dict = _create_query_dict(self.validated_query)
+
+ def _create_query_filter(self):
+ """Converts the query dictionary to the filter text"""
+ self.query_filter = ''
+
+ if self.definition._object_class:
+ self.query_filter += '(&'
+ if isinstance(self.definition._object_class, SEQUENCE_TYPES) and len(self.definition._object_class) == 1:
+ self.query_filter += '(objectClass=' + self.definition._object_class[0] + ')'
+ elif isinstance(self.definition._object_class, SEQUENCE_TYPES):
+ self.query_filter += '(&'
+ for object_class in self.definition._object_class:
+ self.query_filter += '(objectClass=' + object_class + ')'
+ self.query_filter += ')'
+ else:
+ error_message = 'object class must be a string or a list'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+
+ if self._query and self._query.startswith('(') and self._query.endswith(')'): # query is already an LDAP filter
+ if 'objectclass' not in self._query.lower():
+ self.query_filter += self._query + ')' # if objectclass not in filter adds from definition
+ else:
+ self.query_filter = self._query
+ return
+ elif self._query: # if a simplified filter is present
+ if not self.components_in_and:
+ self.query_filter += '(|'
+ elif not self.definition._object_class:
+ self.query_filter += '(&'
+
+ self._validate_query()
+
+ attr_counter = 0
+ for attr in sorted(self._validated_query_dict):
+ attr_counter += 1
+ multi = True if ';' in self._validated_query_dict[attr] else False
+ vals = sorted(self._validated_query_dict[attr].split(';'))
+ attr_def = self.definition[attr[1:]] if attr[0] in '&|' else self.definition[attr]
+ if attr_def.pre_query:
+ modvals = []
+ for val in vals:
+ modvals.append(val[0] + attr_def.pre_query(attr_def.key, val[1:]))
+ vals = modvals
+ if multi:
+ if attr[0] in '&|':
+ self.query_filter += '(' + attr[0]
+ else:
+ self.query_filter += '(|'
+
+ for val in vals:
+ if val[0] == '!':
+ self.query_filter += '(!(' + attr_def.name + _ret_search_value(val[1:]) + '))'
+ else:
+ self.query_filter += '(' + attr_def.name + _ret_search_value(val) + ')'
+ if multi:
+ self.query_filter += ')'
+
+ if not self.components_in_and:
+ self.query_filter += '))'
+ else:
+ self.query_filter += ')'
+
+ if not self.definition._object_class and attr_counter == 1: # removes unneeded starting filter
+ self.query_filter = self.query_filter[2: -1]
+
+ if self.query_filter == '(|)' or self.query_filter == '(&)': # removes empty filter
+ self.query_filter = ''
+ else: # no query, remove unneeded leading (&
+ self.query_filter = self.query_filter[2:]
+
+ def search(self, attributes=None):
+ """Perform the LDAP search
+
+ :return: Entries found in search
+
+ """
+ self.clear()
+ query_scope = SUBTREE if self.sub_tree else LEVEL
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing search in <%s>', self)
+ self._execute_query(query_scope, attributes)
+
+ return self.entries
+
+ def search_object(self, entry_dn=None, attributes=None): # base must be a single dn
+ """Perform the LDAP search operation SINGLE_OBJECT scope
+
+ :return: Entry found in search
+
+ """
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing object search in <%s>', self)
+ self.clear()
+ if entry_dn:
+ old_base = self.base
+ self.base = entry_dn
+ self._execute_query(BASE, attributes)
+ self.base = old_base
+ else:
+ self._execute_query(BASE, attributes)
+
+ return self.entries[0] if len(self.entries) > 0 else None
+
+ def search_level(self, attributes=None):
+ """Perform the LDAP search operation with SINGLE_LEVEL scope
+
+ :return: Entries found in search
+
+ """
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing single level search in <%s>', self)
+ self.clear()
+ self._execute_query(LEVEL, attributes)
+
+ return self.entries
+
+ def search_subtree(self, attributes=None):
+ """Perform the LDAP search operation WHOLE_SUBTREE scope
+
+ :return: Entries found in search
+
+ """
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing whole subtree search in <%s>', self)
+ self.clear()
+ self._execute_query(SUBTREE, attributes)
+
+ return self.entries
+
+ def _entries_generator(self, responses):
+ for response in responses:
+ yield self._create_entry(response)
+
+ def search_paged(self, paged_size, paged_criticality=True, generator=True, attributes=None):
+ """Perform a paged search, can be called as an Iterator
+
+ :param attributes: optional attributes to search
+ :param paged_size: number of entries returned in each search
+ :type paged_size: int
+ :param paged_criticality: specify if server must not execute the search if it is not capable of paging searches
+ :type paged_criticality: bool
+ :param generator: if True the paged searches are executed while generating the entries,
+ if False all the paged searches are executed before returning the generator
+ :type generator: bool
+ :return: Entries found in search
+
+ """
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing paged search in <%s> with paged size %s', self, str(paged_size))
+ if not self.connection:
+ error_message = 'no connection established'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+
+ self.clear()
+ self._create_query_filter()
+ self.entries = []
+ self.execution_time = datetime.now()
+ response = self.connection.extend.standard.paged_search(search_base=self.base,
+ search_filter=self.query_filter,
+ search_scope=SUBTREE if self.sub_tree else LEVEL,
+ dereference_aliases=self.dereference_aliases,
+ attributes=attributes if attributes else self.attributes,
+ get_operational_attributes=self.get_operational_attributes,
+ controls=self.controls,
+ paged_size=paged_size,
+ paged_criticality=paged_criticality,
+ generator=generator)
+ if generator:
+ return self._entries_generator(response)
+ else:
+ return list(self._entries_generator(response))
+
+
+class Writer(Cursor):
+ entry_class = WritableEntry
+ attribute_class = WritableAttribute
+ entry_initial_status = STATUS_WRITABLE
+
+ @staticmethod
+ def from_cursor(cursor, connection=None, object_def=None, custom_validator=None):
+ if connection is None:
+ connection = cursor.connection
+ if object_def is None:
+ object_def = cursor.definition
+ writer = Writer(connection, object_def, attributes=cursor.attributes)
+ for entry in cursor.entries:
+ if isinstance(cursor, Reader):
+ entry.entry_writable(object_def, writer, custom_validator=custom_validator)
+ elif isinstance(cursor, Writer):
+ pass
+ else:
+ error_message = 'unknown cursor type %s' % str(type(cursor))
+ if log_enabled(ERROR):
+ log(ERROR, '%s', error_message)
+ raise LDAPCursorError(error_message)
+ writer.execution_time = cursor.execution_time
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated Writer Cursor <%r> from cursor <%r>', writer, cursor)
+ return writer
+
+ @staticmethod
+ def from_response(connection, object_def, response=None):
+ if response is None:
+ if not connection.strategy.sync:
+ error_message = 'with asynchronous strategies response must be specified'
+ if log_enabled(ERROR):
+ log(ERROR, '%s', error_message)
+ raise LDAPCursorError(error_message)
+ elif connection.response:
+ response = connection.response
+ else:
+ error_message = 'response not present'
+ if log_enabled(ERROR):
+ log(ERROR, '%s', error_message)
+ raise LDAPCursorError(error_message)
+ writer = Writer(connection, object_def)
+
+ for resp in response:
+ if resp['type'] == 'searchResEntry':
+ entry = writer._create_entry(resp)
+ writer.entries.append(entry)
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated Writer Cursor <%r> from response', writer)
+ return writer
+
+ def __init__(self, connection, object_def, get_operational_attributes=False, attributes=None, controls=None, auxiliary_class=None):
+ Cursor.__init__(self, connection, object_def, get_operational_attributes, attributes, controls, auxiliary_class)
+ self.dereference_aliases = DEREF_NEVER
+
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated Writer Cursor: <%r>', self)
+
+ def commit(self, refresh=True):
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'committed changes for <%s>', self)
+ self._reset_history()
+ successful = True
+ for entry in self.entries:
+ if not entry.entry_commit_changes(refresh=refresh, controls=self.controls, clear_history=False):
+ successful = False
+
+ self.execution_time = datetime.now()
+
+ return successful
+
+ def discard(self):
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'discarded changes for <%s>', self)
+ for entry in self.entries:
+ entry.entry_discard_changes()
+
+ def _refresh_object(self, entry_dn, attributes=None, tries=4, seconds=2, controls=None): # base must be a single dn
+ """Performs the LDAP search operation SINGLE_OBJECT scope
+
+ :return: Entry found in search
+
+ """
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'refreshing object <%s> for <%s>', entry_dn, self)
+ if not self.connection:
+ error_message = 'no connection established'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+
+ response = []
+ with self.connection:
+ counter = 0
+ while counter < tries:
+ result = self.connection.search(search_base=entry_dn,
+ search_filter='(objectclass=*)',
+ search_scope=BASE,
+ dereference_aliases=DEREF_NEVER,
+ attributes=attributes if attributes else self.attributes,
+ get_operational_attributes=self.get_operational_attributes,
+ controls=controls)
+ if not self.connection.strategy.sync:
+ response, result, request = self.connection.get_response(result, get_request=True)
+ else:
+ response = self.connection.response
+ result = self.connection.result
+ request = self.connection.request
+
+ if result['result'] in [RESULT_SUCCESS]:
+ break
+ sleep(seconds)
+ counter += 1
+ self._store_operation_in_history(request, result, response)
+
+ if len(response) == 1:
+ return self._create_entry(response[0])
+ elif len(response) == 0:
+ return None
+
+ error_message = 'more than 1 entry returned for a single object search'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+
+ def new(self, dn):
+ if log_enabled(BASIC):
+ log(BASIC, 'creating new entry <%s> for <%s>', dn, self)
+ dn = safe_dn(dn)
+ for entry in self.entries: # checks if dn is already used in a cursor entry
+ if entry.entry_dn == dn:
+ error_message = 'dn already present in cursor'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ rdns = safe_rdn(dn, decompose=True)
+ entry = self.entry_class(dn, self) # defines a new empty Entry
+ for attr in entry.entry_mandatory_attributes: # defines all mandatory attributes as virtual
+ entry._state.attributes[attr] = self.attribute_class(entry._state.definition[attr], entry, self)
+ entry.__dict__[attr] = entry._state.attributes[attr]
+ entry.objectclass.set(self.definition._object_class)
+ for rdn in rdns: # adds virtual attributes from rdns in entry name (there can be more than one with the + syntax)
+ if rdn[0] in entry._state.definition._attributes:
+ rdn_name = entry._state.definition._attributes[rdn[0]].name # normalize case folding
+ if rdn_name not in entry._state.attributes:
+ entry._state.attributes[rdn_name] = self.attribute_class(entry._state.definition[rdn_name], entry, self)
+ entry.__dict__[rdn_name] = entry._state.attributes[rdn_name]
+ entry.__dict__[rdn_name].set(rdn[1])
+ else:
+ error_message = 'rdn type \'%s\' not in object class definition' % rdn[0]
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ entry._state.set_status(STATUS_VIRTUAL) # set initial status
+ entry._state.set_status(STATUS_PENDING_CHANGES) # tries to change status to PENDING_CHANGES. If mandatory attributes are missing status is reverted to MANDATORY_MISSING
+ self.entries.append(entry)
+ return entry
+
+ def refresh_entry(self, entry, tries=4, seconds=2):
+ conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
+
+ self._do_not_reset = True
+ attr_list = []
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'refreshing entry <%s> for <%s>', entry, self)
+ for attr in entry._state.attributes: # check friendly attribute name in AttrDef, do not check operational attributes
+ if attr.lower().startswith(conf_operational_attribute_prefix.lower()):
+ continue
+ if entry._state.definition[attr].name:
+ attr_list.append(entry._state.definition[attr].name)
+ else:
+ attr_list.append(entry._state.definition[attr].key)
+
+ temp_entry = self._refresh_object(entry.entry_dn, attr_list, tries, seconds=seconds) # if any attribute is added it is added only to the entry, not to the definition
+ self._do_not_reset = False
+ if temp_entry:
+ temp_entry._state.origin = entry._state.origin
+ entry.__dict__.clear()
+ entry.__dict__['_state'] = temp_entry._state
+ for attr in entry._state.attributes: # returns the attribute key
+ entry.__dict__[attr] = entry._state.attributes[attr]
+
+ for attr in entry.entry_attributes: # if any attribute of the class was deleted, make it virtual
+ if attr not in entry._state.attributes and attr in entry.entry_definition._attributes:
+ entry._state.attributes[attr] = WritableAttribute(entry.entry_definition[attr], entry, self)
+ entry.__dict__[attr] = entry._state.attributes[attr]
+ entry._state.set_status(entry._state._initial_status)
+ return True
+ return False
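
For context, a minimal sketch of how the Reader and Writer cursors defined above are typically used together. The host name, credentials, base DN, simplified query and attribute names (sn, telephoneNumber) are illustrative assumptions, not values taken from this changeset:

from ldap3 import Server, Connection, Reader, Writer

# hypothetical server and credentials - replace with real values
server = Server('ldap.example.com')
conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret', auto_bind=True)

# a string object class is accepted: Cursor.__init__ builds the ObjectDef from the server schema
reader = Reader(conn, 'inetOrgPerson', 'ou=people,dc=example,dc=com', 'sn: Smith')
reader.search()                          # entries are read-only Entry objects
for entry in reader:
    print(entry.entry_dn, entry.sn)      # assumes 'sn' is populated on the matched entries

# paged search, returned as a generator of entries
for entry in reader.search_paged(paged_size=100):
    print(entry.entry_dn)

# make the matched entries writable and commit one change
writer = Writer.from_cursor(reader)
writer[0].telephoneNumber = '+1 555 0100'   # WritableEntry records the assignment as a pending change
writer.commit()                             # performs the LDAP modify operations for all pending changes
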
diff --git a/ldap3/abstract/entry.py b/ldap3/abstract/entry.py
index a6a222f..b73c50f 100644
--- a/ldap3/abstract/entry.py
+++ b/ldap3/abstract/entry.py
@@ -1,662 +1,699 @@
-"""
-"""
-
-# Created on 2016.08.19
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2016 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-
-import json
-try:
- from collections import OrderedDict
-except ImportError:
- from ..utils.ordDict import OrderedDict # for Python 2.6
-
-from os import linesep
-
-from .. import STRING_TYPES, SEQUENCE_TYPES
-from .attribute import WritableAttribute
-from .objectDef import ObjectDef
-from .attrDef import AttrDef
-from ..core.exceptions import LDAPKeyError, LDAPCursorError
-from ..utils.conv import check_json_dict, format_json, prepare_for_stream
-from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
-from ..utils.dn import safe_dn, safe_rdn, to_dn
-from ..utils.repr import to_stdout_encoding
-from ..utils.ciDict import CaseInsensitiveWithAliasDict
-from ..utils.config import get_config_parameter
-from . import STATUS_VIRTUAL, STATUS_WRITABLE, STATUS_PENDING_CHANGES, STATUS_COMMITTED, STATUS_DELETED,\
- STATUS_INIT, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING, STATUS_MANDATORY_MISSING, STATUSES, INITIAL_STATUSES
-from ..core.results import RESULT_SUCCESS
-from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
-
-
-class EntryState(object):
- """Contains data on the status of the entry. Does not pollute the Entry __dict__.
-
- """
-
- def __init__(self, dn, cursor):
- self.dn = dn
- self._initial_status = None
- self._to = None # used for move and rename
- self.status = STATUS_INIT
- self.attributes = CaseInsensitiveWithAliasDict()
- self.raw_attributes = CaseInsensitiveWithAliasDict()
- self.response = None
- self.cursor = cursor
- self.origin = None # reference to the original read-only entry (set when made writable). Needed to update attributes in read-only when modified (only if both refer the same server)
- self.read_time = None
- self.changes = OrderedDict() # includes changes to commit in a writable entry
- if cursor.definition:
- self.definition = cursor.definition
- else:
- self.definition = None
-
- def __repr__(self):
- if self.__dict__ and self.dn is not None:
- r = 'DN: ' + to_stdout_encoding(self.dn) + ' - STATUS: ' + ((self._initial_status + ', ') if self._initial_status != self.status else '') + self.status + ' - READ TIME: ' + (self.read_time.isoformat() if self.read_time else '<never>') + linesep
- r += 'attributes: ' + ', '.join(sorted(self.attributes.keys())) + linesep
- r += 'object def: ' + (', '.join(sorted(self.definition._object_class)) if self.definition._object_class else '<None>') + linesep
- r += 'attr defs: ' + ', '.join(sorted(self.definition._attributes.keys())) + linesep
- r += 'response: ' + ('present' if self.response else '<None>') + linesep
- r += 'cursor: ' + (self.cursor.__class__.__name__ if self.cursor else '<None>') + linesep
- return r
- else:
- return object.__repr__(self)
-
- def __str__(self):
- return self.__repr__()
-
- def set_status(self, status):
- conf_ignored_mandatory_attributes_in_object_def = [v.lower() for v in get_config_parameter('IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF')]
- if status not in STATUSES:
- error_message = 'invalid entry status ' + str(status)
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- if status in INITIAL_STATUSES:
- self._initial_status = status
- self.status = status
- if status == STATUS_DELETED:
- self._initial_status = STATUS_VIRTUAL
- if status == STATUS_COMMITTED:
- self._initial_status = STATUS_WRITABLE
- if self.status == STATUS_VIRTUAL or (self.status == STATUS_PENDING_CHANGES and self._initial_status == STATUS_VIRTUAL): # checks if all mandatory attributes are present in new entries
- for attr in self.definition._attributes:
- if self.definition._attributes[attr].mandatory and attr.lower() not in conf_ignored_mandatory_attributes_in_object_def:
- if (attr not in self.attributes or self.attributes[attr].virtual) and attr not in self.changes:
- self.status = STATUS_MANDATORY_MISSING
- break
-
-
-class EntryBase(object):
- """The Entry object contains a single LDAP entry.
- Attributes can be accessed either by sequence, by assignment
- or as dictionary keys. Keys are not case sensitive.
-
- The Entry object is read only
-
- - The DN is retrieved by _dn
- - The cursor reference is in _cursor
- - Raw attributes values are retrieved with _raw_attributes and the _raw_attribute() methods
- """
-
- def __init__(self, dn, cursor):
- self.__dict__['_state'] = EntryState(dn, cursor)
-
- def __repr__(self):
- if self.__dict__ and self.entry_dn is not None:
- r = 'DN: ' + to_stdout_encoding(self.entry_dn) + ' - STATUS: ' + ((self._state._initial_status + ', ') if self._state._initial_status != self.entry_status else '') + self.entry_status + ' - READ TIME: ' + (self.entry_read_time.isoformat() if self.entry_read_time else '<never>') + linesep
- if self._state.attributes:
- for attr in sorted(self._state.attributes):
- if self._state.attributes[attr] or (hasattr(self._state.attributes[attr], 'changes') and self._state.attributes[attr].changes):
- r += ' ' + repr(self._state.attributes[attr]) + linesep
- return r
- else:
- return object.__repr__(self)
-
- def __str__(self):
- return self.__repr__()
-
- def __iter__(self):
- for attribute in self._state.attributes:
- yield self._state.attributes[attribute]
- # raise StopIteration # deprecated in PEP 479
- return
-
- def __contains__(self, item):
- try:
- self.__getitem__(item)
- return True
- except LDAPKeyError:
- return False
-
- def __getattr__(self, item):
- if isinstance(item, STRING_TYPES):
- if item == '_state':
- return self.__dict__['_state']
- item = ''.join(item.split()).lower()
- attr_found = None
- for attr in self._state.attributes.keys():
- if item == attr.lower():
- attr_found = attr
- break
- if not attr_found:
- for attr in self._state.attributes.aliases():
- if item == attr.lower():
- attr_found = attr
- break
- if not attr_found:
- for attr in self._state.attributes.keys():
- if item + ';binary' == attr.lower():
- attr_found = attr
- break
- if not attr_found:
- for attr in self._state.attributes.aliases():
- if item + ';binary' == attr.lower():
- attr_found = attr
- break
- if not attr_found:
- for attr in self._state.attributes.keys():
- if item + ';range' in attr.lower():
- attr_found = attr
- break
- if not attr_found:
- for attr in self._state.attributes.aliases():
- if item + ';range' in attr.lower():
- attr_found = attr
- break
- if not attr_found:
- error_message = 'attribute \'%s\' not found' % item
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- return self._state.attributes[attr]
- error_message = 'attribute name must be a string'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
-
- def __setattr__(self, item, value):
- if item in self._state.attributes:
- error_message = 'attribute \'%s\' is read only' % item
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- else:
- error_message = 'entry \'%s\' is read only' % item
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
-
- def __getitem__(self, item):
- if isinstance(item, STRING_TYPES):
- item = ''.join(item.split()).lower()
- attr_found = None
- for attr in self._state.attributes.keys():
- if item == attr.lower():
- attr_found = attr
- break
- if not attr_found:
- for attr in self._state.attributes.aliases():
- if item == attr.lower():
- attr_found = attr
- break
- if not attr_found:
- for attr in self._state.attributes.keys():
- if item + ';binary' == attr.lower():
- attr_found = attr
- break
- if not attr_found:
- for attr in self._state.attributes.aliases():
- if item + ';binary' == attr.lower():
- attr_found = attr
- break
- if not attr_found:
- error_message = 'key \'%s\' not found' % item
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- return self._state.attributes[attr]
-
- error_message = 'key must be a string'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPKeyError(error_message)
-
- def __eq__(self, other):
- if isinstance(other, EntryBase):
- return self.entry_dn == other.entry_dn
-
- return False
-
- def __lt__(self, other):
- if isinstance(other, EntryBase):
- return self.entry_dn <= other.entry_dn
-
- return False
-
- @property
- def entry_dn(self):
- return self._state.dn
-
- @property
- def entry_cursor(self):
- return self._state.cursor
-
- @property
- def entry_status(self):
- return self._state.status
-
- @property
- def entry_definition(self):
- return self._state.definition
-
- @property
- def entry_raw_attributes(self):
- return self._state.entry_raw_attributes
-
- def entry_raw_attribute(self, name):
- """
-
- :param name: name of the attribute
- :return: raw (unencoded) value of the attribute, None if attribute is not found
- """
- return self._state.entry_raw_attributes[name] if name in self._state.entry_raw_attributes else None
-
- @property
- def entry_mandatory_attributes(self):
- return [attribute for attribute in self.entry_definition._attributes if self.entry_definition._attributes[attribute].mandatory]
-
- @property
- def entry_attributes(self):
- # attr_list = list()
- # for attr in self._state.attributes:
- # if self._state.definition[attr].name:
- # attr_list.append(self._state.definition[attr].name)
- # else:
- # attr_list.append(self._state.definition[attr].key)
- # return attr_list
- return list(self._state.attributes.keys())
-
- @property
- def entry_attributes_as_dict(self):
- return dict((attribute_key, attribute_value.values) for (attribute_key, attribute_value) in self._state.attributes.items())
-
- @property
- def entry_read_time(self):
- return self._state.read_time
-
- @property
- def _changes(self):
- return self._state.changes
-
- def entry_to_json(self, raw=False, indent=4, sort=True, stream=None, checked_attributes=True, include_empty=True):
- json_entry = dict()
- json_entry['dn'] = self.entry_dn
- if checked_attributes:
- if not include_empty:
- # needed for python 2.6 compatibility
- json_entry['attributes'] = dict((key, self.entry_attributes_as_dict[key]) for key in self.entry_attributes_as_dict if self.entry_attributes_as_dict[key])
- else:
- json_entry['attributes'] = self.entry_attributes_as_dict
- if raw:
- if not include_empty:
- # needed for python 2.6 compatibility
- json_entry['raw'] = dict((key, self.entry_raw_attributes[key]) for key in self.entry_raw_attributes if self.entry_raw_attributes[key])
- else:
- json_entry['raw'] = dict(self.entry_raw_attributes)
-
- if str is bytes: # Python 2
- check_json_dict(json_entry)
-
- json_output = json.dumps(json_entry,
- ensure_ascii=True,
- sort_keys=sort,
- indent=indent,
- check_circular=True,
- default=format_json,
- separators=(',', ': '))
-
- if stream:
- stream.write(json_output)
-
- return json_output
-
- def entry_to_ldif(self, all_base64=False, line_separator=None, sort_order=None, stream=None):
- ldif_lines = operation_to_ldif('searchResponse', [self._state.response], all_base64, sort_order=sort_order)
- ldif_lines = add_ldif_header(ldif_lines)
- line_separator = line_separator or linesep
- ldif_output = line_separator.join(ldif_lines)
- if stream:
- if stream.tell() == 0:
- header = add_ldif_header(['-'])[0]
- stream.write(prepare_for_stream(header + line_separator + line_separator))
- stream.write(prepare_for_stream(ldif_output + line_separator + line_separator))
- return ldif_output
-
-
-class Entry(EntryBase):
- """The Entry object contains a single LDAP entry.
- Attributes can be accessed either by sequence, by assignment
- or as dictionary keys. Keys are not case sensitive.
-
- The Entry object is read only
-
- - The DN is retrieved by _dn()
- - The Reader reference is in _cursor()
- - Raw attributes values are retrieved by the _ra_attributes and
- _raw_attribute() methods
-
- """
- def entry_writable(self, object_def=None, writer_cursor=None, attributes=None, custom_validator=None):
- if not self.entry_cursor.schema:
- error_message = 'schema must be available to make an entry writable'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- # returns a new WritableEntry and its Writer cursor
- if object_def is None:
- if self.entry_cursor.definition._object_class:
- object_def = self.entry_cursor.definition._object_class
- elif 'objectclass' in self:
- object_def = self.objectclass.values
-
- if not object_def:
- error_message = 'object class must be specified to make an entry writable'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
-
- if not isinstance(object_def, ObjectDef):
- object_def = ObjectDef(object_def, self.entry_cursor.schema, custom_validator)
-
- if attributes:
- if isinstance(attributes, STRING_TYPES):
- attributes = [attributes]
-
- if isinstance(attributes, SEQUENCE_TYPES):
- for attribute in attributes:
- if attribute not in object_def._attributes:
- error_message = 'attribute \'%s\' not in schema for \'%s\'' % (attribute, object_def)
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- else:
- attributes = []
-
- if not writer_cursor:
- from .cursor import Writer # local import to avoid circular reference in import at startup
- writable_cursor = Writer(self.entry_cursor.connection, object_def)
- else:
- writable_cursor = writer_cursor
-
- if attributes: # force reading of attributes
- writable_entry = writable_cursor._refresh_object(self.entry_dn, list(attributes) + self.entry_attributes)
- else:
- writable_entry = writable_cursor._create_entry(self._state.response)
- writable_cursor.entries.append(writable_entry)
- writable_entry._state.read_time = self.entry_read_time
- writable_entry._state.origin = self # reference to the original read-only entry
- # checks original entry for custom definitions in AttrDefs
- for attr in writable_entry._state.origin.entry_definition._attributes:
- original_attr = writable_entry._state.origin.entry_definition._attributes[attr]
- if attr != original_attr.name and attr not in writable_entry._state.attributes:
- old_attr_def = writable_entry.entry_definition._attributes[original_attr.name]
- new_attr_def = AttrDef(original_attr.name,
- key=attr,
- validate=original_attr.validate,
- pre_query=original_attr.pre_query,
- post_query=original_attr.post_query,
- default=original_attr.default,
- dereference_dn=original_attr.dereference_dn,
- description=original_attr.description,
- mandatory=old_attr_def.mandatory, # keeps value read from schema
- single_value=old_attr_def.single_value, # keeps value read from schema
- alias=original_attr.other_names)
- object_def = writable_entry.entry_definition
- object_def -= old_attr_def
- object_def += new_attr_def
- # updates attribute name in entry attributes
- new_attr = WritableAttribute(new_attr_def, writable_entry, writable_cursor)
- if original_attr.name in writable_entry._state.attributes:
- new_attr.other_names = writable_entry._state.attributes[original_attr.name].other_names
- new_attr.raw_values = writable_entry._state.attributes[original_attr.name].raw_values
- new_attr.values = writable_entry._state.attributes[original_attr.name].values
- new_attr.response = writable_entry._state.attributes[original_attr.name].response
- writable_entry._state.attributes[attr] = new_attr
- # writable_entry._state.attributes.set_alias(attr, new_attr.other_names)
- del writable_entry._state.attributes[original_attr.name]
-
- writable_entry._state.set_status(STATUS_WRITABLE)
- return writable_entry
-
-
-class WritableEntry(EntryBase):
- def __setitem__(self, key, value):
- if value is not Ellipsis: # hack for using implicit operators in writable attributes
- self.__setattr__(key, value)
-
- def __setattr__(self, item, value):
- conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
- if item == '_state' and isinstance(value, EntryState):
- self.__dict__['_state'] = value
- return
-
- if value is not Ellipsis: # hack for using implicit operators in writable attributes
- # checks if using an alias
- if item in self.entry_cursor.definition._attributes or item.lower() in conf_attributes_excluded_from_object_def:
- if item not in self._state.attributes: # setting value to an attribute still without values
- new_attribute = WritableAttribute(self.entry_cursor.definition._attributes[item], self, cursor=self.entry_cursor)
- self._state.attributes[str(item)] = new_attribute # force item to a string for key in attributes dict
- self._state.attributes[item].set(value) # try to add to new_values
- else:
- error_message = 'attribute \'%s\' not defined' % item
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
-
- def __getattr__(self, item):
- if isinstance(item, STRING_TYPES):
- if item == '_state':
- return self.__dict__['_state']
- item = ''.join(item.split()).lower()
- for attr in self._state.attributes.keys():
- if item == attr.lower():
- return self._state.attributes[attr]
- for attr in self._state.attributes.aliases():
- if item == attr.lower():
- return self._state.attributes[attr]
- if item in self.entry_definition._attributes: # item is a new attribute to commit, creates the AttrDef and add to the attributes to retrive
- self._state.attributes[item] = WritableAttribute(self.entry_definition._attributes[item], self, self.entry_cursor)
- self.entry_cursor.attributes.add(item)
- return self._state.attributes[item]
- error_message = 'attribute \'%s\' not defined' % item
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- else:
- error_message = 'attribute name must be a string'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
-
- @property
- def entry_virtual_attributes(self):
- return [attr for attr in self.entry_attributes if self[attr].virtual]
-
- def entry_commit_changes(self, refresh=True, controls=None, clear_history=True):
- if clear_history:
- self.entry_cursor._reset_history()
-
- if self.entry_status == STATUS_READY_FOR_DELETION:
- result = self.entry_cursor.connection.delete(self.entry_dn, controls)
- if not self.entry_cursor.connection.strategy.sync:
- response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
- else:
- response = self.entry_cursor.connection.response
- result = self.entry_cursor.connection.result
- request = self.entry_cursor.connection.request
- self.entry_cursor._store_operation_in_history(request, result, response)
- if result['result'] == RESULT_SUCCESS:
- dn = self.entry_dn
- if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # deletes original read-only Entry
- cursor = self._state.origin.entry_cursor
- self._state.origin.__dict__.clear()
- self._state.origin.__dict__['_state'] = EntryState(dn, cursor)
- self._state.origin._state.set_status(STATUS_DELETED)
- cursor = self.entry_cursor
- self.__dict__.clear()
- self._state = EntryState(dn, cursor)
- self._state.set_status(STATUS_DELETED)
- return True
- return False
- elif self.entry_status == STATUS_READY_FOR_MOVING:
- result = self.entry_cursor.connection.modify_dn(self.entry_dn, '+'.join(safe_rdn(self.entry_dn)), new_superior=self._state._to)
- if not self.entry_cursor.connection.strategy.sync:
- response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
- else:
- response = self.entry_cursor.connection.response
- result = self.entry_cursor.connection.result
- request = self.entry_cursor.connection.request
- self.entry_cursor._store_operation_in_history(request, result, response)
- if result['result'] == RESULT_SUCCESS:
- self._state.dn = safe_dn('+'.join(safe_rdn(self.entry_dn)) + ',' + self._state._to)
- if refresh:
- if self.entry_refresh():
- if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin
- self._state.origin._state.dn = self.entry_dn
- self._state.set_status(STATUS_COMMITTED)
- self._state._to = None
- return True
- return False
- elif self.entry_status == STATUS_READY_FOR_RENAMING:
- rdn = '+'.join(safe_rdn(self._state._to))
- result = self.entry_cursor.connection.modify_dn(self.entry_dn, rdn)
- if not self.entry_cursor.connection.strategy.sync:
- response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
- else:
- response = self.entry_cursor.connection.response
- result = self.entry_cursor.connection.result
- request = self.entry_cursor.connection.request
- self.entry_cursor._store_operation_in_history(request, result, response)
- if result['result'] == RESULT_SUCCESS:
- self._state.dn = rdn + ',' + ','.join(to_dn(self.entry_dn)[1:])
- if refresh:
- if self.entry_refresh():
- if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin
- self._state.origin._state.dn = self.entry_dn
- self._state.set_status(STATUS_COMMITTED)
- self._state._to = None
- return True
- return False
- elif self.entry_status in [STATUS_VIRTUAL, STATUS_MANDATORY_MISSING]:
- missing_attributes = []
- for attr in self.entry_mandatory_attributes:
- if (attr not in self._state.attributes or self._state.attributes[attr].virtual) and attr not in self._changes:
- missing_attributes.append('\'' + attr + '\'')
- error_message = 'mandatory attributes %s missing in entry %s' % (', '.join(missing_attributes), self.entry_dn)
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- elif self.entry_status == STATUS_PENDING_CHANGES:
- if self._changes:
- if self._state._initial_status == STATUS_VIRTUAL:
- new_attributes = dict()
- for attr in self._changes:
- new_attributes[attr] = self._changes[attr][0][1]
- result = self.entry_cursor.connection.add(self.entry_dn, None, new_attributes, controls)
- else:
- result = self.entry_cursor.connection.modify(self.entry_dn, self._changes, controls)
-
- if not self.entry_cursor.connection.strategy.sync: # asynchronous request
- response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
- else:
- response = self.entry_cursor.connection.response
- result = self.entry_cursor.connection.result
- request = self.entry_cursor.connection.request
- self.entry_cursor._store_operation_in_history(request, result, response)
-
- if result['result'] == RESULT_SUCCESS:
- if refresh:
- if self.entry_refresh():
- if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # updates original read-only entry if present
- for attr in self: # adds AttrDefs from writable entry to origin entry definition if some is missing
- if attr.key in self.entry_definition._attributes and attr.key not in self._state.origin.entry_definition._attributes:
- self._state.origin.entry_cursor.definition.add_attribute(self.entry_cursor.definition._attributes[attr.key]) # adds AttrDef from writable entry to original entry if missing
- temp_entry = self._state.origin.entry_cursor._create_entry(self._state.response)
- self._state.origin.__dict__.clear()
- self._state.origin.__dict__['_state'] = temp_entry._state
- for attr in self: # returns the whole attribute object
- if not attr.virtual:
- self._state.origin.__dict__[attr.key] = self._state.origin._state.attributes[attr.key]
- self._state.origin._state.read_time = self.entry_read_time
- else:
- self.entry_discard_changes() # if not refreshed remove committed changes
- self._state.set_status(STATUS_COMMITTED)
- return True
- return False
-
- def entry_discard_changes(self):
- self._changes.clear()
- self._state.set_status(self._state._initial_status)
-
- def entry_delete(self):
- if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_DELETION]:
- error_message = 'cannot delete entry, invalid status: ' + self.entry_status
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- self._state.set_status(STATUS_READY_FOR_DELETION)
-
- def entry_refresh(self, tries=4, seconds=2):
- """
-
- Refreshes the entry from the LDAP Server
- """
- if self.entry_cursor.connection:
- if self.entry_cursor.refresh_entry(self, tries, seconds):
- return True
-
- return False
-
- def entry_move(self, destination_dn):
- if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_MOVING]:
- error_message = 'cannot move entry, invalid status: ' + self.entry_status
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- self._state._to = safe_dn(destination_dn)
- self._state.set_status(STATUS_READY_FOR_MOVING)
-
- def entry_rename(self, new_name):
- if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_RENAMING]:
- error_message = 'cannot rename entry, invalid status: ' + self.entry_status
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', error_message, self)
- raise LDAPCursorError(error_message)
- self._state._to = new_name
- self._state.set_status(STATUS_READY_FOR_RENAMING)
-
- @property
- def entry_changes(self):
- return self._changes
+"""
+"""
+
+# Created on 2016.08.19
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2016 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+
+import json
+try:
+ from collections import OrderedDict
+except ImportError:
+ from ..utils.ordDict import OrderedDict # for Python 2.6
+
+from os import linesep
+
+from .. import STRING_TYPES, SEQUENCE_TYPES, MODIFY_ADD, MODIFY_REPLACE
+from .attribute import WritableAttribute
+from .objectDef import ObjectDef
+from .attrDef import AttrDef
+from ..core.exceptions import LDAPKeyError, LDAPCursorError, LDAPCursorAttributeError
+from ..utils.conv import check_json_dict, format_json, prepare_for_stream
+from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
+from ..utils.dn import safe_dn, safe_rdn, to_dn
+from ..utils.repr import to_stdout_encoding
+from ..utils.ciDict import CaseInsensitiveWithAliasDict
+from ..utils.config import get_config_parameter
+from . import STATUS_VIRTUAL, STATUS_WRITABLE, STATUS_PENDING_CHANGES, STATUS_COMMITTED, STATUS_DELETED,\
+ STATUS_INIT, STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING, STATUS_MANDATORY_MISSING, STATUSES, INITIAL_STATUSES
+from ..core.results import RESULT_SUCCESS
+from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
+
+
+class EntryState(object):
+    """Holds the state of the entry (DN, status, attributes, raw attributes, pending changes) in a separate object, so it does not pollute the Entry __dict__.
+
+ """
+
+ def __init__(self, dn, cursor):
+ self.dn = dn
+ self._initial_status = None
+ self._to = None # used for move and rename
+ self.status = STATUS_INIT
+ self.attributes = CaseInsensitiveWithAliasDict()
+ self.raw_attributes = CaseInsensitiveWithAliasDict()
+ self.response = None
+ self.cursor = cursor
+        self.origin = None  # reference to the original read-only entry (set when the entry is made writable). Needed to update the read-only entry's attributes after a modification (only if both entries refer to the same server)
+ self.read_time = None
+ self.changes = OrderedDict() # includes changes to commit in a writable entry
+ if cursor.definition:
+ self.definition = cursor.definition
+ else:
+ self.definition = None
+
+ def __repr__(self):
+ if self.__dict__ and self.dn is not None:
+ r = 'DN: ' + to_stdout_encoding(self.dn) + ' - STATUS: ' + ((self._initial_status + ', ') if self._initial_status != self.status else '') + self.status + ' - READ TIME: ' + (self.read_time.isoformat() if self.read_time else '<never>') + linesep
+ r += 'attributes: ' + ', '.join(sorted(self.attributes.keys())) + linesep
+ r += 'object def: ' + (', '.join(sorted(self.definition._object_class)) if self.definition._object_class else '<None>') + linesep
+ r += 'attr defs: ' + ', '.join(sorted(self.definition._attributes.keys())) + linesep
+ r += 'response: ' + ('present' if self.response else '<None>') + linesep
+ r += 'cursor: ' + (self.cursor.__class__.__name__ if self.cursor else '<None>') + linesep
+ return r
+ else:
+ return object.__repr__(self)
+
+ def __str__(self):
+ return self.__repr__()
+
+ def __getstate__(self):
+ cpy = dict(self.__dict__)
+ cpy['cursor'] = None
+ return cpy
+
+ def set_status(self, status):
+ conf_ignored_mandatory_attributes_in_object_def = [v.lower() for v in get_config_parameter('IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF')]
+ if status not in STATUSES:
+ error_message = 'invalid entry status ' + str(status)
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ if status in INITIAL_STATUSES:
+ self._initial_status = status
+ self.status = status
+ if status == STATUS_DELETED:
+ self._initial_status = STATUS_VIRTUAL
+ if status == STATUS_COMMITTED:
+ self._initial_status = STATUS_WRITABLE
+ if self.status == STATUS_VIRTUAL or (self.status == STATUS_PENDING_CHANGES and self._initial_status == STATUS_VIRTUAL): # checks if all mandatory attributes are present in new entries
+ for attr in self.definition._attributes:
+ if self.definition._attributes[attr].mandatory and attr.lower() not in conf_ignored_mandatory_attributes_in_object_def:
+ if (attr not in self.attributes or self.attributes[attr].virtual) and attr not in self.changes:
+ self.status = STATUS_MANDATORY_MISSING
+ break
+
+ @property
+ def entry_raw_attributes(self):
+ return self.raw_attributes
+
+
+class EntryBase(object):
+ """The Entry object contains a single LDAP entry.
+    Attributes can be accessed by iterating over the entry, by attribute
+    name or as dictionary keys. Keys are case-insensitive.
+
+    The Entry object is read-only
+
+    - The DN is available in the entry_dn property
+    - The cursor reference is available in the entry_cursor property
+    - Raw attribute values are retrieved with entry_raw_attributes and the entry_raw_attribute() method
+ """
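+
+    # Illustrative usage sketch, not part of the library: it assumes "entry" was
+    # returned by a search through a Cursor, and that the attributes shown
+    # ("cn", "givenName", "sn") exist on that entry (hypothetical names).
+    #
+    #   print(entry.entry_dn)                    # the DN of the entry
+    #   print(entry.cn)                          # access by attribute name (case-insensitive)
+    #   print(entry['givenName'])                # access as a dictionary key
+    #   if 'sn' in entry:                        # membership test via __contains__
+    #       print(entry.sn.values)               # list of decoded values
+    #   for attribute in entry:                  # iteration yields the Attribute objects
+    #       print(attribute.key, attribute.values)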
+
+ def __init__(self, dn, cursor):
+ self._state = EntryState(dn, cursor)
+
+ def __repr__(self):
+ if self.__dict__ and self.entry_dn is not None:
+ r = 'DN: ' + to_stdout_encoding(self.entry_dn) + ' - STATUS: ' + ((self._state._initial_status + ', ') if self._state._initial_status != self.entry_status else '') + self.entry_status + ' - READ TIME: ' + (self.entry_read_time.isoformat() if self.entry_read_time else '<never>') + linesep
+ if self._state.attributes:
+ for attr in sorted(self._state.attributes):
+ if self._state.attributes[attr] or (hasattr(self._state.attributes[attr], 'changes') and self._state.attributes[attr].changes):
+ r += ' ' + repr(self._state.attributes[attr]) + linesep
+ return r
+ else:
+ return object.__repr__(self)
+
+ def __str__(self):
+ return self.__repr__()
+
+ def __iter__(self):
+ for attribute in self._state.attributes:
+ yield self._state.attributes[attribute]
+ # raise StopIteration # deprecated in PEP 479
+ return
+
+ def __contains__(self, item):
+ try:
+ self.__getitem__(item)
+ return True
+ except LDAPKeyError:
+ return False
+
+ def __getattr__(self, item):
+ if isinstance(item, STRING_TYPES):
+ if item == '_state':
+ return object.__getattr__(self, item)
+ item = ''.join(item.split()).lower()
+ attr_found = None
+ for attr in self._state.attributes.keys():
+ if item == attr.lower():
+ attr_found = attr
+ break
+ if not attr_found:
+ for attr in self._state.attributes.aliases():
+ if item == attr.lower():
+ attr_found = attr
+ break
+ if not attr_found:
+ for attr in self._state.attributes.keys():
+ if item + ';binary' == attr.lower():
+ attr_found = attr
+ break
+ if not attr_found:
+ for attr in self._state.attributes.aliases():
+ if item + ';binary' == attr.lower():
+ attr_found = attr
+ break
+ if not attr_found:
+ for attr in self._state.attributes.keys():
+ if item + ';range' in attr.lower():
+ attr_found = attr
+ break
+ if not attr_found:
+ for attr in self._state.attributes.aliases():
+ if item + ';range' in attr.lower():
+ attr_found = attr
+ break
+ if not attr_found:
+ error_message = 'attribute \'%s\' not found' % item
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorAttributeError(error_message)
+ return self._state.attributes[attr]
+ error_message = 'attribute name must be a string'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorAttributeError(error_message)
+
+ def __setattr__(self, item, value):
+ if item == '_state':
+ object.__setattr__(self, item, value)
+ elif item in self._state.attributes:
+ error_message = 'attribute \'%s\' is read only' % item
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorAttributeError(error_message)
+ else:
+ error_message = 'entry is read only, cannot add \'%s\'' % item
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorAttributeError(error_message)
+
+ def __getitem__(self, item):
+ if isinstance(item, STRING_TYPES):
+ item = ''.join(item.split()).lower()
+ attr_found = None
+ for attr in self._state.attributes.keys():
+ if item == attr.lower():
+ attr_found = attr
+ break
+ if not attr_found:
+ for attr in self._state.attributes.aliases():
+ if item == attr.lower():
+ attr_found = attr
+ break
+ if not attr_found:
+ for attr in self._state.attributes.keys():
+ if item + ';binary' == attr.lower():
+ attr_found = attr
+ break
+ if not attr_found:
+ for attr in self._state.attributes.aliases():
+ if item + ';binary' == attr.lower():
+ attr_found = attr
+ break
+ if not attr_found:
+ error_message = 'key \'%s\' not found' % item
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPKeyError(error_message)
+ return self._state.attributes[attr]
+
+ error_message = 'key must be a string'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPKeyError(error_message)
+
+ def __eq__(self, other):
+ if isinstance(other, EntryBase):
+ return self.entry_dn == other.entry_dn
+
+ return False
+
+ def __lt__(self, other):
+ if isinstance(other, EntryBase):
+ return self.entry_dn <= other.entry_dn
+
+ return False
+
+ @property
+ def entry_dn(self):
+ return self._state.dn
+
+ @property
+ def entry_cursor(self):
+ return self._state.cursor
+
+ @property
+ def entry_status(self):
+ return self._state.status
+
+ @property
+ def entry_definition(self):
+ return self._state.definition
+
+ @property
+ def entry_raw_attributes(self):
+ return self._state.raw_attributes
+
+ def entry_raw_attribute(self, name):
+ """
+
+ :param name: name of the attribute
+ :return: raw (unencoded) value of the attribute, None if attribute is not found
+ """
+ return self._state.raw_attributes[name] if name in self._state.raw_attributes else None
+
+ @property
+ def entry_mandatory_attributes(self):
+ return [attribute for attribute in self.entry_definition._attributes if self.entry_definition._attributes[attribute].mandatory]
+
+ @property
+ def entry_attributes(self):
+ return list(self._state.attributes.keys())
+
+ @property
+ def entry_attributes_as_dict(self):
+ return dict((attribute_key, attribute_value.values) for (attribute_key, attribute_value) in self._state.attributes.items())
+
+ @property
+ def entry_read_time(self):
+ return self._state.read_time
+
+ @property
+ def _changes(self):
+ return self._state.changes
+
+ def entry_to_json(self, raw=False, indent=4, sort=True, stream=None, checked_attributes=True, include_empty=True):
+ json_entry = dict()
+ json_entry['dn'] = self.entry_dn
+ if checked_attributes:
+ if not include_empty:
+ # needed for python 2.6 compatibility
+ json_entry['attributes'] = dict((key, self.entry_attributes_as_dict[key]) for key in self.entry_attributes_as_dict if self.entry_attributes_as_dict[key])
+ else:
+ json_entry['attributes'] = self.entry_attributes_as_dict
+ if raw:
+ if not include_empty:
+ # needed for python 2.6 compatibility
+ json_entry['raw'] = dict((key, self.entry_raw_attributes[key]) for key in self.entry_raw_attributes if self.entry_raw_attributes[key])
+ else:
+ json_entry['raw'] = dict(self.entry_raw_attributes)
+
+ if str is bytes: # Python 2
+ check_json_dict(json_entry)
+
+ json_output = json.dumps(json_entry,
+ ensure_ascii=True,
+ sort_keys=sort,
+ indent=indent,
+ check_circular=True,
+ default=format_json,
+ separators=(',', ': '))
+
+ if stream:
+ stream.write(json_output)
+
+ return json_output
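+
+    # Illustrative sketch, not part of the library, of how entry_to_json might be
+    # used; the file name "entry.json" is hypothetical.
+    #
+    #   data = entry.entry_to_json(raw=True, include_empty=False)  # JSON string, raw values included, empty attributes skipped
+    #   with open('entry.json', 'w') as json_file:
+    #       entry.entry_to_json(stream=json_file)                  # the same JSON is also written to the stream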
+
+ def entry_to_ldif(self, all_base64=False, line_separator=None, sort_order=None, stream=None):
+ ldif_lines = operation_to_ldif('searchResponse', [self._state.response], all_base64, sort_order=sort_order)
+ ldif_lines = add_ldif_header(ldif_lines)
+ line_separator = line_separator or linesep
+ ldif_output = line_separator.join(ldif_lines)
+ if stream:
+ if stream.tell() == 0:
+ header = add_ldif_header(['-'])[0]
+ stream.write(prepare_for_stream(header + line_separator + line_separator))
+ stream.write(prepare_for_stream(ldif_output + line_separator + line_separator))
+ return ldif_output
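+
+    # Illustrative sketch, not part of the library, of how entry_to_ldif might be used.
+    #
+    #   ldif_text = entry.entry_to_ldif()                 # LDIF rendering of this entry's search response
+    #   ldif_text = entry.entry_to_ldif(all_base64=True)  # force base64 encoding of every value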
+
+
+class Entry(EntryBase):
+ """The Entry object contains a single LDAP entry.
+    Attributes can be accessed by iterating over the entry, by attribute
+    name or as dictionary keys. Keys are case-insensitive.
+
+    The Entry object is read-only
+
+    - The DN is available in the entry_dn property
+    - The Reader cursor reference is available in the entry_cursor property
+    - Raw attribute values are retrieved with entry_raw_attributes and
+      the entry_raw_attribute() method
+
+ """
+ def entry_writable(self, object_def=None, writer_cursor=None, attributes=None, custom_validator=None, auxiliary_class=None):
+ conf_operational_attribute_prefix = get_config_parameter('ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX')
+ if not self.entry_cursor.schema:
+ error_message = 'schema must be available to make an entry writable'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ # returns a new WritableEntry and its Writer cursor
+ if object_def is None:
+ if self.entry_cursor.definition._object_class:
+ object_def = self.entry_definition._object_class
+ auxiliary_class = self.entry_definition._auxiliary_class + (auxiliary_class if isinstance(auxiliary_class, SEQUENCE_TYPES) else [])
+ elif 'objectclass' in self:
+ object_def = self.objectclass.values
+
+ if not object_def:
+ error_message = 'object class must be specified to make an entry writable'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+
+ if not isinstance(object_def, ObjectDef):
+ object_def = ObjectDef(object_def, self.entry_cursor.schema, custom_validator, auxiliary_class)
+
+ if attributes:
+ if isinstance(attributes, STRING_TYPES):
+ attributes = [attributes]
+
+ if isinstance(attributes, SEQUENCE_TYPES):
+ for attribute in attributes:
+ if attribute not in object_def._attributes:
+ error_message = 'attribute \'%s\' not in schema for \'%s\'' % (attribute, object_def)
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ else:
+ attributes = []
+
+ if not writer_cursor:
+ from .cursor import Writer # local import to avoid circular reference in import at startup
+ writable_cursor = Writer(self.entry_cursor.connection, object_def)
+ else:
+ writable_cursor = writer_cursor
+
+ if attributes: # force reading of attributes
+ writable_entry = writable_cursor._refresh_object(self.entry_dn, list(attributes) + self.entry_attributes)
+ else:
+ writable_entry = writable_cursor._create_entry(self._state.response)
+ writable_cursor.entries.append(writable_entry)
+ writable_entry._state.read_time = self.entry_read_time
+ writable_entry._state.origin = self # reference to the original read-only entry
+ # checks original entry for custom definitions in AttrDefs
+ attr_to_add = []
+ attr_to_remove = []
+ object_def_to_add = []
+ object_def_to_remove = []
+ for attr in writable_entry._state.origin.entry_definition._attributes:
+ original_attr = writable_entry._state.origin.entry_definition._attributes[attr]
+ if attr != original_attr.name and (attr not in writable_entry._state.attributes or conf_operational_attribute_prefix + original_attr.name not in writable_entry._state.attributes):
+ old_attr_def = writable_entry.entry_definition._attributes[original_attr.name]
+ new_attr_def = AttrDef(original_attr.name,
+ key=attr,
+ validate=original_attr.validate,
+ pre_query=original_attr.pre_query,
+ post_query=original_attr.post_query,
+ default=original_attr.default,
+ dereference_dn=original_attr.dereference_dn,
+ description=original_attr.description,
+ mandatory=old_attr_def.mandatory, # keeps value read from schema
+ single_value=old_attr_def.single_value, # keeps value read from schema
+ alias=original_attr.other_names)
+ od = writable_entry.entry_definition
+ object_def_to_remove.append(old_attr_def)
+ object_def_to_add.append(new_attr_def)
+ # updates attribute name in entry attributes
+ new_attr = WritableAttribute(new_attr_def, writable_entry, writable_cursor)
+ if original_attr.name in writable_entry._state.attributes:
+ new_attr.other_names = writable_entry._state.attributes[original_attr.name].other_names
+ new_attr.raw_values = writable_entry._state.attributes[original_attr.name].raw_values
+ new_attr.values = writable_entry._state.attributes[original_attr.name].values
+ new_attr.response = writable_entry._state.attributes[original_attr.name].response
+ attr_to_add.append((attr, new_attr))
+ attr_to_remove.append(original_attr.name)
+ # writable_entry._state.attributes[attr] = new_attr
+ ## writable_entry._state.attributes.set_alias(attr, new_attr.other_names)
+ # del writable_entry._state.attributes[original_attr.name]
+ for attr, new_attr in attr_to_add:
+ writable_entry._state.attributes[attr] = new_attr
+ for attr in attr_to_remove:
+ del writable_entry._state.attributes[attr]
+ for object_def in object_def_to_remove:
+ o = writable_entry.entry_definition
+ o -= object_def
+ for object_def in object_def_to_add:
+ o = writable_entry.entry_definition
+ o += object_def
+
+ writable_entry._state.set_status(STATUS_WRITABLE)
+ return writable_entry
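+
+    # Illustrative sketch, not part of the library: obtaining a writable copy of a
+    # read-only entry. The object class 'inetOrgPerson' and the auxiliary class
+    # 'posixAccount' are hypothetical examples.
+    #
+    #   writable = entry.entry_writable()                                # reuses the entry's own object definition
+    #   writable = entry.entry_writable('inetOrgPerson',
+    #                                   auxiliary_class='posixAccount')  # or an explicit definition with an auxiliary class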
+
+
+class WritableEntry(EntryBase):
+ def __setitem__(self, key, value):
+ if value is not Ellipsis: # hack for using implicit operators in writable attributes
+ self.__setattr__(key, value)
+
+ def __setattr__(self, item, value):
+ conf_attributes_excluded_from_object_def = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF')]
+ if item == '_state' and isinstance(value, EntryState):
+ self.__dict__['_state'] = value
+ return
+
+ if value is not Ellipsis: # hack for using implicit operators in writable attributes
+ # checks if using an alias
+ if item in self.entry_cursor.definition._attributes or item.lower() in conf_attributes_excluded_from_object_def:
+ if item not in self._state.attributes: # setting value to an attribute still without values
+ new_attribute = WritableAttribute(self.entry_cursor.definition._attributes[item], self, cursor=self.entry_cursor)
+ self._state.attributes[str(item)] = new_attribute # force item to a string for key in attributes dict
+ self._state.attributes[item].set(value) # try to add to new_values
+ else:
+ error_message = 'attribute \'%s\' not defined' % item
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorAttributeError(error_message)
+
+ def __getattr__(self, item):
+ if isinstance(item, STRING_TYPES):
+ if item == '_state':
+ return self.__dict__['_state']
+ item = ''.join(item.split()).lower()
+ for attr in self._state.attributes.keys():
+ if item == attr.lower():
+ return self._state.attributes[attr]
+ for attr in self._state.attributes.aliases():
+ if item == attr.lower():
+ return self._state.attributes[attr]
+            if item in self.entry_definition._attributes:  # item is a new attribute to commit, creates the AttrDef and adds it to the attributes to retrieve
+ self._state.attributes[item] = WritableAttribute(self.entry_definition._attributes[item], self, self.entry_cursor)
+ self.entry_cursor.attributes.add(item)
+ return self._state.attributes[item]
+ error_message = 'attribute \'%s\' not defined' % item
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorAttributeError(error_message)
+ else:
+ error_message = 'attribute name must be a string'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorAttributeError(error_message)
+
+ @property
+ def entry_virtual_attributes(self):
+ return [attr for attr in self.entry_attributes if self[attr].virtual]
+
+ def entry_commit_changes(self, refresh=True, controls=None, clear_history=True):
+ if clear_history:
+ self.entry_cursor._reset_history()
+
+ if self.entry_status == STATUS_READY_FOR_DELETION:
+ result = self.entry_cursor.connection.delete(self.entry_dn, controls)
+ if not self.entry_cursor.connection.strategy.sync:
+ response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
+ else:
+ response = self.entry_cursor.connection.response
+ result = self.entry_cursor.connection.result
+ request = self.entry_cursor.connection.request
+ self.entry_cursor._store_operation_in_history(request, result, response)
+ if result['result'] == RESULT_SUCCESS:
+ dn = self.entry_dn
+ if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # deletes original read-only Entry
+ cursor = self._state.origin.entry_cursor
+ self._state.origin.__dict__.clear()
+ self._state.origin.__dict__['_state'] = EntryState(dn, cursor)
+ self._state.origin._state.set_status(STATUS_DELETED)
+ cursor = self.entry_cursor
+ self.__dict__.clear()
+ self._state = EntryState(dn, cursor)
+ self._state.set_status(STATUS_DELETED)
+ return True
+ return False
+ elif self.entry_status == STATUS_READY_FOR_MOVING:
+ result = self.entry_cursor.connection.modify_dn(self.entry_dn, '+'.join(safe_rdn(self.entry_dn)), new_superior=self._state._to)
+ if not self.entry_cursor.connection.strategy.sync:
+ response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
+ else:
+ response = self.entry_cursor.connection.response
+ result = self.entry_cursor.connection.result
+ request = self.entry_cursor.connection.request
+ self.entry_cursor._store_operation_in_history(request, result, response)
+ if result['result'] == RESULT_SUCCESS:
+ self._state.dn = safe_dn('+'.join(safe_rdn(self.entry_dn)) + ',' + self._state._to)
+ if refresh:
+ if self.entry_refresh():
+ if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin
+ self._state.origin._state.dn = self.entry_dn
+ self._state.set_status(STATUS_COMMITTED)
+ self._state._to = None
+ return True
+ return False
+ elif self.entry_status == STATUS_READY_FOR_RENAMING:
+ rdn = '+'.join(safe_rdn(self._state._to))
+ result = self.entry_cursor.connection.modify_dn(self.entry_dn, rdn)
+ if not self.entry_cursor.connection.strategy.sync:
+ response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
+ else:
+ response = self.entry_cursor.connection.response
+ result = self.entry_cursor.connection.result
+ request = self.entry_cursor.connection.request
+ self.entry_cursor._store_operation_in_history(request, result, response)
+ if result['result'] == RESULT_SUCCESS:
+ self._state.dn = rdn + ',' + ','.join(to_dn(self.entry_dn)[1:])
+ if refresh:
+ if self.entry_refresh():
+ if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # refresh dn of origin
+ self._state.origin._state.dn = self.entry_dn
+ self._state.set_status(STATUS_COMMITTED)
+ self._state._to = None
+ return True
+ return False
+ elif self.entry_status in [STATUS_VIRTUAL, STATUS_MANDATORY_MISSING]:
+ missing_attributes = []
+ for attr in self.entry_mandatory_attributes:
+ if (attr not in self._state.attributes or self._state.attributes[attr].virtual) and attr not in self._changes:
+ missing_attributes.append('\'' + attr + '\'')
+ error_message = 'mandatory attributes %s missing in entry %s' % (', '.join(missing_attributes), self.entry_dn)
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ elif self.entry_status == STATUS_PENDING_CHANGES:
+ if self._changes:
+ if self.entry_definition._auxiliary_class: # checks if an attribute is from an auxiliary class and adds it to the objectClass attribute if not present
+ for attr in self._changes:
+ # checks schema to see if attribute is defined in one of the already present object classes
+ attr_classes = self.entry_cursor.schema.attribute_types[attr].mandatory_in + self.entry_cursor.schema.attribute_types[attr].optional_in
+ for object_class in self.objectclass:
+ if object_class in attr_classes:
+ break
+                        else:  # for-else: executed only when none of the entry's current object classes defines this attribute
+ # checks if attribute is defined in one of the possible auxiliary classes
+ for aux_class in self.entry_definition._auxiliary_class:
+ if aux_class in attr_classes:
+ if self._state._initial_status == STATUS_VIRTUAL: # entry is new, there must be a pending objectClass MODIFY_REPLACE
+ self._changes['objectClass'][0][1].append(aux_class)
+ else:
+ self.objectclass += aux_class
+ if self._state._initial_status == STATUS_VIRTUAL:
+ new_attributes = dict()
+ for attr in self._changes:
+ new_attributes[attr] = self._changes[attr][0][1]
+ result = self.entry_cursor.connection.add(self.entry_dn, None, new_attributes, controls)
+ else:
+ result = self.entry_cursor.connection.modify(self.entry_dn, self._changes, controls)
+
+ if not self.entry_cursor.connection.strategy.sync: # asynchronous request
+ response, result, request = self.entry_cursor.connection.get_response(result, get_request=True)
+ else:
+ response = self.entry_cursor.connection.response
+ result = self.entry_cursor.connection.result
+ request = self.entry_cursor.connection.request
+ self.entry_cursor._store_operation_in_history(request, result, response)
+
+ if result['result'] == RESULT_SUCCESS:
+ if refresh:
+ if self.entry_refresh():
+ if self._state.origin and self.entry_cursor.connection.server == self._state.origin.entry_cursor.connection.server: # updates original read-only entry if present
+ for attr in self: # adds AttrDefs from writable entry to origin entry definition if some is missing
+ if attr.key in self.entry_definition._attributes and attr.key not in self._state.origin.entry_definition._attributes:
+ self._state.origin.entry_cursor.definition.add_attribute(self.entry_cursor.definition._attributes[attr.key]) # adds AttrDef from writable entry to original entry if missing
+ temp_entry = self._state.origin.entry_cursor._create_entry(self._state.response)
+ self._state.origin.__dict__.clear()
+ self._state.origin.__dict__['_state'] = temp_entry._state
+ for attr in self: # returns the whole attribute object
+ if not hasattr(attr,'virtual'):
+ self._state.origin.__dict__[attr.key] = self._state.origin._state.attributes[attr.key]
+ self._state.origin._state.read_time = self.entry_read_time
+ else:
+ self.entry_discard_changes() # if not refreshed remove committed changes
+ self._state.set_status(STATUS_COMMITTED)
+ return True
+ return False
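+
+    # Illustrative sketch, not part of the library: the typical commit cycle on a
+    # writable entry. The attribute name 'sn' and its values are hypothetical.
+    #
+    #   writable.sn = 'Smith'                # schedules a replace, status becomes pending changes
+    #   writable.sn += 'Smyth'               # implicit operator (the Ellipsis hack above) schedules an add
+    #   if writable.entry_commit_changes():  # sends the LDAP operation, True on RESULT_SUCCESS
+    #       assert writable.entry_status == STATUS_COMMITTED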
+
+ def entry_discard_changes(self):
+ self._changes.clear()
+ self._state.set_status(self._state._initial_status)
+
+ def entry_delete(self):
+ if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_DELETION]:
+ error_message = 'cannot delete entry, invalid status: ' + self.entry_status
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ self._state.set_status(STATUS_READY_FOR_DELETION)
+
+ def entry_refresh(self, tries=4, seconds=2):
+ """
+
+ Refreshes the entry from the LDAP Server
+ """
+ if self.entry_cursor.connection:
+ if self.entry_cursor.refresh_entry(self, tries, seconds):
+ return True
+
+ return False
+
+ def entry_move(self, destination_dn):
+ if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_MOVING]:
+ error_message = 'cannot move entry, invalid status: ' + self.entry_status
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ self._state._to = safe_dn(destination_dn)
+ self._state.set_status(STATUS_READY_FOR_MOVING)
+
+ def entry_rename(self, new_name):
+ if self.entry_status not in [STATUS_WRITABLE, STATUS_COMMITTED, STATUS_READY_FOR_RENAMING]:
+ error_message = 'cannot rename entry, invalid status: ' + self.entry_status
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', error_message, self)
+ raise LDAPCursorError(error_message)
+ self._state._to = new_name
+ self._state.set_status(STATUS_READY_FOR_RENAMING)
+
+ @property
+ def entry_changes(self):
+ return self._changes
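+
+    # Illustrative sketch, not part of the library: delete, move and rename are
+    # two-step operations - the entry is flagged first and the LDAP operation is
+    # only sent by entry_commit_changes(). The DNs shown are hypothetical.
+    #
+    #   writable.entry_move('ou=archive,dc=example,dc=com')  # status becomes ready for moving
+    #   writable.entry_commit_changes()                      # performs the ModifyDN operation
+    #
+    #   writable.entry_rename('cn=new-name')                 # status becomes ready for renaming
+    #   writable.entry_commit_changes()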
diff --git a/ldap3/abstract/objectDef.py b/ldap3/abstract/objectDef.py
index f49bbbe..1f8609c 100644
--- a/ldap3/abstract/objectDef.py
+++ b/ldap3/abstract/objectDef.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -38,18 +38,24 @@ from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED
class ObjectDef(object):
"""Represent an object in the LDAP server. AttrDefs are stored in a dictionary; the key is the friendly name defined in AttrDef.
- AttrDefs can be added and removed using the += ad -= operators
+ AttrDefs can be added and removed using the += and -= operators
ObjectDef can be accessed either as a sequence and a dictionary. When accessed the whole AttrDef instance is returned
"""
- def __init__(self, object_class=None, schema=None, custom_validator=None):
+ def __init__(self, object_class=None, schema=None, custom_validator=None, auxiliary_class=None):
if object_class is None:
object_class = []
if not isinstance(object_class, SEQUENCE_TYPES):
object_class = [object_class]
+ if auxiliary_class is None:
+ auxiliary_class = []
+
+ if not isinstance(auxiliary_class, SEQUENCE_TYPES):
+ auxiliary_class = [auxiliary_class]
+
self.__dict__['_attributes'] = CaseInsensitiveWithAliasDict()
self.__dict__['_custom_validator'] = custom_validator
self.__dict__['_oid_info'] = []
@@ -63,7 +69,7 @@ class ObjectDef(object):
elif isinstance(schema, Connection):
schema = schema.server.schema
elif isinstance(schema, SchemaInfo):
- schema = schema
+ pass
elif schema:
error_message = 'unable to read schema'
if log_enabled(ERROR):
@@ -78,11 +84,17 @@ class ObjectDef(object):
if self._schema:
object_class = [schema.object_classes[name].name[0] for name in object_class] # uses object class names capitalized as in schema
+ auxiliary_class = [schema.object_classes[name].name[0] for name in auxiliary_class]
for object_name in object_class:
if object_name:
self._populate_attr_defs(object_name)
+ for object_name in auxiliary_class:
+ if object_name:
+ self._populate_attr_defs(object_name)
+
self.__dict__['_object_class'] = object_class
+ self.__dict__['_auxiliary_class'] = auxiliary_class
if log_enabled(BASIC):
log(BASIC, 'instantiated ObjectDef: <%r>', self)
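+
+        # Illustrative sketch, not part of the library: auxiliary_class lets an
+        # ObjectDef also expose the attributes of auxiliary object classes. The
+        # class names and the bound "connection" below are hypothetical.
+        #
+        #   person = ObjectDef('inetOrgPerson', connection, auxiliary_class='posixAccount')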
@@ -108,10 +120,14 @@ class ObjectDef(object):
def __repr__(self):
if self._object_class:
- r = 'OBJ : ' + ', '.join(self._object_class)
+ r = 'OBJ : ' + ', '.join(self._object_class) + linesep
+ else:
+ r = 'OBJ : <None>' + linesep
+ if self._auxiliary_class:
+ r += 'AUX : ' + ', '.join(self._auxiliary_class) + linesep
else:
- r = 'OBJ : <None>'
- r += ' [' + ', '.join([oid for oid in self._oid_info]) + ']' + linesep
+ r += 'AUX : <None>' + linesep
+ r += 'OID: ' + ', '.join([oid for oid in self._oid_info]) + linesep
r += 'MUST: ' + ', '.join(sorted([attr for attr in self._attributes if self._attributes[attr].mandatory])) + linesep
r += 'MAY : ' + ', '.join(sorted([attr for attr in self._attributes if not self._attributes[attr].mandatory])) + linesep
@@ -250,4 +266,5 @@ class ObjectDef(object):
"""
self.__dict__['object_class'] = None
+ self.__dict__['auxiliary_class'] = None
self.__dict__['_attributes'] = dict()
diff --git a/ldap3/core/connection.py b/ldap3/core/connection.py
index 76ec944..0f148e8 100644
--- a/ldap3/core/connection.py
+++ b/ldap3/core/connection.py
@@ -1,1496 +1,1549 @@
-"""
-"""
-
-# Created on 2014.05.31
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from os import linesep
-from threading import RLock
-from functools import reduce
-import json
-
-from .. import ANONYMOUS, SIMPLE, SASL, MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, get_config_parameter, DEREF_ALWAYS, \
- SUBTREE, ASYNC, SYNC, NO_ATTRIBUTES, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, MODIFY_INCREMENT, LDIF, ASYNC_STREAM, \
- RESTARTABLE, ROUND_ROBIN, REUSABLE, AUTO_BIND_NONE, AUTO_BIND_TLS_BEFORE_BIND, AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_NO_TLS, \
- STRING_TYPES, SEQUENCE_TYPES, MOCK_SYNC, MOCK_ASYNC, NTLM, EXTERNAL, DIGEST_MD5, GSSAPI, PLAIN
-
-from .results import RESULT_SUCCESS, RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE
-from ..extend import ExtendedOperationsRoot
-from .pooling import ServerPool
-from .server import Server
-from ..operation.abandon import abandon_operation, abandon_request_to_dict
-from ..operation.add import add_operation, add_request_to_dict
-from ..operation.bind import bind_operation, bind_request_to_dict
-from ..operation.compare import compare_operation, compare_request_to_dict
-from ..operation.delete import delete_operation, delete_request_to_dict
-from ..operation.extended import extended_operation, extended_request_to_dict
-from ..operation.modify import modify_operation, modify_request_to_dict
-from ..operation.modifyDn import modify_dn_operation, modify_dn_request_to_dict
-from ..operation.search import search_operation, search_request_to_dict
-from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
-from ..protocol.sasl.digestMd5 import sasl_digest_md5
-from ..protocol.sasl.external import sasl_external
-from ..protocol.sasl.plain import sasl_plain
-from ..strategy.sync import SyncStrategy
-from ..strategy.mockAsync import MockAsyncStrategy
-from ..strategy.asynchronous import AsyncStrategy
-from ..strategy.reusable import ReusableStrategy
-from ..strategy.restartable import RestartableStrategy
-from ..strategy.ldifProducer import LdifProducerStrategy
-from ..strategy.mockSync import MockSyncStrategy
-from ..strategy.asyncStream import AsyncStreamStrategy
-from ..operation.unbind import unbind_operation
-from ..protocol.rfc2696 import paged_search_control
-from .usage import ConnectionUsage
-from .tls import Tls
-from .exceptions import LDAPUnknownStrategyError, LDAPBindError, LDAPUnknownAuthenticationMethodError, \
- LDAPSASLMechanismNotSupportedError, LDAPObjectClassError, LDAPConnectionIsReadOnlyError, LDAPChangeError, LDAPExceptionError, \
- LDAPObjectError, LDAPSocketReceiveError, LDAPAttributeError, LDAPInvalidValueError, LDAPConfigurationError
-
-from ..utils.conv import escape_bytes, prepare_for_stream, check_json_dict, format_json, to_unicode
-from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED, get_library_log_hide_sensitive_data
-from ..utils.dn import safe_dn
-
-
-SASL_AVAILABLE_MECHANISMS = [EXTERNAL,
- DIGEST_MD5,
- GSSAPI,
- PLAIN]
-
-CLIENT_STRATEGIES = [SYNC,
- ASYNC,
- LDIF,
- RESTARTABLE,
- REUSABLE,
- MOCK_SYNC,
- MOCK_ASYNC,
- ASYNC_STREAM]
-
-
-def _format_socket_endpoint(endpoint):
- if endpoint and len(endpoint) == 2: # IPv4
- return str(endpoint[0]) + ':' + str(endpoint[1])
- elif endpoint and len(endpoint) == 4: # IPv6
- return '[' + str(endpoint[0]) + ']:' + str(endpoint[1])
-
- try:
- return str(endpoint)
- except Exception:
- return '?'
-
-
-def _format_socket_endpoints(sock):
- if sock:
- try:
- local = sock.getsockname()
- except Exception:
- local = (None, None, None, None)
- try:
- remote = sock.getpeername()
- except Exception:
- remote = (None, None, None, None)
-
- return '<local: ' + _format_socket_endpoint(local) + ' - remote: ' + _format_socket_endpoint(remote) + '>'
- return '<no socket>'
-
-
-# noinspection PyProtectedMember
-class Connection(object):
- """Main ldap connection class.
-
- Controls, if used, must be a list of tuples. Each tuple must have 3
- elements, the control OID, a boolean meaning if the control is
- critical, a value.
-
- If the boolean is set to True the server must honor the control or
- refuse the operation
-
- Mixing controls must be defined in controls specification (as per
- RFC 4511)
-
- :param server: the Server object to connect to
- :type server: Server, str
- :param user: the user name for simple authentication
- :type user: str
- :param password: the password for simple authentication
- :type password: str
- :param auto_bind: specify if the bind will be performed automatically when defining the Connection object
- :type auto_bind: int, can be one of AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_BEFORE_BIND, AUTO_BIND_TLS_AFTER_BIND as specified in ldap3
- :param version: LDAP version, default to 3
- :type version: int
- :param authentication: type of authentication
- :type authentication: int, can be one of AUTH_ANONYMOUS, AUTH_SIMPLE or AUTH_SASL, as specified in ldap3
- :param client_strategy: communication strategy used in the Connection
- :type client_strategy: can be one of STRATEGY_SYNC, STRATEGY_ASYNC_THREADED, STRATEGY_LDIF_PRODUCER, STRATEGY_SYNC_RESTARTABLE, STRATEGY_REUSABLE_THREADED as specified in ldap3
- :param auto_referrals: specify if the connection object must automatically follow referrals
- :type auto_referrals: bool
- :param sasl_mechanism: mechanism for SASL authentication, can be one of 'EXTERNAL', 'DIGEST-MD5', 'GSSAPI', 'PLAIN'
- :type sasl_mechanism: str
- :param sasl_credentials: credentials for SASL mechanism
- :type sasl_credentials: tuple
- :param check_names: if True the library will check names of attributes and object classes against the schema. Also values found in entries will be formatted as indicated by the schema
- :type check_names: bool
- :param collect_usage: collect usage metrics in the usage attribute
- :type collect_usage: bool
- :param read_only: disable operations that modify data in the LDAP server
- :type read_only: bool
- :param lazy: open and bind the connection only when an actual operation is performed
- :type lazy: bool
- :param raise_exceptions: raise exceptions when operations are not successful, if False operations return False if not successful but not raise exceptions
- :type raise_exceptions: bool
- :param pool_name: pool name for pooled strategies
- :type pool_name: str
- :param pool_size: pool size for pooled strategies
- :type pool_size: int
- :param pool_lifetime: pool lifetime for pooled strategies
- :type pool_lifetime: int
- :param use_referral_cache: keep referral connections open and reuse them
- :type use_referral_cache: bool
- :param auto_escape: automatic escaping of filter values
- :param auto_encode: automatic encoding of attribute values
- :type use_referral_cache: bool
- """
-
- def __init__(self,
- server,
- user=None,
- password=None,
- auto_bind=AUTO_BIND_NONE,
- version=3,
- authentication=None,
- client_strategy=SYNC,
- auto_referrals=True,
- auto_range=True,
- sasl_mechanism=None,
- sasl_credentials=None,
- check_names=True,
- collect_usage=False,
- read_only=False,
- lazy=False,
- raise_exceptions=False,
- pool_name=None,
- pool_size=None,
- pool_lifetime=None,
- fast_decoder=True,
- receive_timeout=None,
- return_empty_attributes=True,
- use_referral_cache=False,
- auto_escape=True,
- auto_encode=True,
- pool_keepalive=None):
-
- conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
- self.connection_lock = RLock() # re-entrant lock to ensure that operations in the Connection object are executed atomically in the same thread
- with self.connection_lock:
- if client_strategy not in CLIENT_STRATEGIES:
- self.last_error = 'unknown client connection strategy'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPUnknownStrategyError(self.last_error)
-
- self.strategy_type = client_strategy
- self.user = user
- self.password = password
-
- if not authentication and self.user:
- self.authentication = SIMPLE
- elif not authentication:
- self.authentication = ANONYMOUS
- elif authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]:
- self.authentication = authentication
- else:
- self.last_error = 'unknown authentication method'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPUnknownAuthenticationMethodError(self.last_error)
-
- self.version = version
- self.auto_referrals = True if auto_referrals else False
- self.request = None
- self.response = None
- self.result = None
- self.bound = False
- self.listening = False
- self.closed = True
- self.last_error = None
- if auto_bind is False: # compatibility with older version where auto_bind was a boolean
- self.auto_bind = AUTO_BIND_NONE
- elif auto_bind is True:
- self.auto_bind = AUTO_BIND_NO_TLS
- else:
- self.auto_bind = auto_bind
- self.sasl_mechanism = sasl_mechanism
- self.sasl_credentials = sasl_credentials
- self._usage = ConnectionUsage() if collect_usage else None
- self.socket = None
- self.tls_started = False
- self.sasl_in_progress = False
- self.read_only = read_only
- self._context_state = []
- self._deferred_open = False
- self._deferred_bind = False
- self._deferred_start_tls = False
- self._bind_controls = None
- self._executing_deferred = False
- self.lazy = lazy
- self.pool_name = pool_name if pool_name else conf_default_pool_name
- self.pool_size = pool_size
- self.pool_lifetime = pool_lifetime
- self.pool_keepalive = pool_keepalive
- self.starting_tls = False
- self.check_names = check_names
- self.raise_exceptions = raise_exceptions
- self.auto_range = True if auto_range else False
- self.extend = ExtendedOperationsRoot(self)
- self._entries = []
- self.fast_decoder = fast_decoder
- self.receive_timeout = receive_timeout
- self.empty_attributes = return_empty_attributes
- self.use_referral_cache = use_referral_cache
- self.auto_escape = auto_escape
- self.auto_encode = auto_encode
-
- if isinstance(server, STRING_TYPES):
- server = Server(server)
- if isinstance(server, SEQUENCE_TYPES):
- server = ServerPool(server, ROUND_ROBIN, active=True, exhaust=True)
-
- if isinstance(server, ServerPool):
- self.server_pool = server
- self.server_pool.initialize(self)
- self.server = self.server_pool.get_current_server(self)
- else:
- self.server_pool = None
- self.server = server
-
- # if self.authentication == SIMPLE and self.user and self.check_names:
- # self.user = safe_dn(self.user)
- # if log_enabled(EXTENDED):
- # log(EXTENDED, 'user name sanitized to <%s> for simple authentication via <%s>', self.user, self)
-
- if self.strategy_type == SYNC:
- self.strategy = SyncStrategy(self)
- elif self.strategy_type == ASYNC:
- self.strategy = AsyncStrategy(self)
- elif self.strategy_type == LDIF:
- self.strategy = LdifProducerStrategy(self)
- elif self.strategy_type == RESTARTABLE:
- self.strategy = RestartableStrategy(self)
- elif self.strategy_type == REUSABLE:
- self.strategy = ReusableStrategy(self)
- self.lazy = False
- elif self.strategy_type == MOCK_SYNC:
- self.strategy = MockSyncStrategy(self)
- elif self.strategy_type == MOCK_ASYNC:
- self.strategy = MockAsyncStrategy(self)
- elif self.strategy_type == ASYNC_STREAM:
- self.strategy = AsyncStreamStrategy(self)
- else:
- self.last_error = 'unknown strategy'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPUnknownStrategyError(self.last_error)
-
- # maps strategy functions to connection functions
- self.send = self.strategy.send
- self.open = self.strategy.open
- self.get_response = self.strategy.get_response
- self.post_send_single_response = self.strategy.post_send_single_response
- self.post_send_search = self.strategy.post_send_search
-
- if not self.strategy.no_real_dsa:
- self.do_auto_bind()
- # else: # for strategies with a fake server set get_info to NONE if server hasn't a schema
- # if self.server and not self.server.schema:
- # self.server.get_info = NONE
- if log_enabled(BASIC):
- if get_library_log_hide_sensitive_data():
- log(BASIC, 'instantiated Connection: <%s>', self.repr_with_sensitive_data_stripped())
- else:
- log(BASIC, 'instantiated Connection: <%r>', self)
-
- def do_auto_bind(self):
- if self.auto_bind and self.auto_bind != AUTO_BIND_NONE:
- if log_enabled(BASIC):
- log(BASIC, 'performing automatic bind for <%s>', self)
- if self.closed:
- self.open(read_server_info=False)
- if self.auto_bind == AUTO_BIND_NO_TLS:
- self.bind(read_server_info=True)
- elif self.auto_bind == AUTO_BIND_TLS_BEFORE_BIND:
- self.start_tls(read_server_info=False)
- self.bind(read_server_info=True)
- elif self.auto_bind == AUTO_BIND_TLS_AFTER_BIND:
- self.bind(read_server_info=False)
- self.start_tls(read_server_info=True)
- if not self.bound:
- self.last_error = 'automatic bind not successful' + (' - ' + self.last_error if self.last_error else '')
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPBindError(self.last_error)
-
- def __str__(self):
- s = [
- str(self.server) if self.server else 'None',
- 'user: ' + str(self.user),
- 'lazy' if self.lazy else 'not lazy',
- 'unbound' if not self.bound else ('deferred bind' if self._deferred_bind else 'bound'),
- 'closed' if self.closed else ('deferred open' if self._deferred_open else 'open'),
- _format_socket_endpoints(self.socket),
- 'tls not started' if not self.tls_started else('deferred start_tls' if self._deferred_start_tls else 'tls started'),
- 'listening' if self.listening else 'not listening',
- self.strategy.__class__.__name__ if hasattr(self, 'strategy') else 'No strategy',
- 'internal decoder' if self.fast_decoder else 'pyasn1 decoder'
- ]
- return ' - '.join(s)
-
- def __repr__(self):
- conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
- if self.server_pool:
- r = 'Connection(server={0.server_pool!r}'.format(self)
- else:
- r = 'Connection(server={0.server!r}'.format(self)
- r += '' if self.user is None else ', user={0.user!r}'.format(self)
- r += '' if self.password is None else ', password={0.password!r}'.format(self)
- r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self)
- r += '' if self.version is None else ', version={0.version!r}'.format(self)
- r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self)
- r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self)
- r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self)
- r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self)
- r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self)
- r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self)
- r += '' if self.usage is None else (', collect_usage=' + ('True' if self.usage else 'False'))
- r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self)
- r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self)
- r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self)
- r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self)
- r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self)
- r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self)
- r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self)
- r += '' if self.fast_decoder is None else (', fast_decoder=' + ('True' if self.fast_decoder else 'False'))
- r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False'))
- r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self)
- r += '' if self.empty_attributes is None else (', return_empty_attributes=' + ('True' if self.empty_attributes else 'False'))
- r += '' if self.auto_encode is None else (', auto_encode=' + ('True' if self.auto_encode else 'False'))
- r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
- r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
- r += ')'
-
- return r
-
- def repr_with_sensitive_data_stripped(self):
- conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
- if self.server_pool:
- r = 'Connection(server={0.server_pool!r}'.format(self)
- else:
- r = 'Connection(server={0.server!r}'.format(self)
- r += '' if self.user is None else ', user={0.user!r}'.format(self)
- r += '' if self.password is None else ", password='{0}'".format('<stripped %d characters of sensitive data>' % len(self.password))
- r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self)
- r += '' if self.version is None else ', version={0.version!r}'.format(self)
- r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self)
- r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self)
- r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self)
- r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self)
- if self.sasl_mechanism == DIGEST_MD5:
- r += '' if self.sasl_credentials is None else ", sasl_credentials=({0!r}, {1!r}, '{2}', {3!r})".format(self.sasl_credentials[0], self.sasl_credentials[1], '*' * len(self.sasl_credentials[2]), self.sasl_credentials[3])
- else:
- r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self)
- r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self)
-        r += '' if self.usage is None else (', collect_usage=' + ('True' if self.usage else 'False'))
- r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self)
- r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self)
- r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self)
- r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self)
- r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self)
- r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self)
- r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self)
-        r += '' if self.fast_decoder is None else (', fast_decoder=' + ('True' if self.fast_decoder else 'False'))
- r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False'))
- r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self)
-        r += '' if self.empty_attributes is None else (', return_empty_attributes=' + ('True' if self.empty_attributes else 'False'))
- r += '' if self.auto_encode is None else (', auto_encode=' + ('True' if self.auto_encode else 'False'))
- r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
- r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
- r += ')'
-
- return r
-
- @property
- def stream(self):
- """Used by the LDIFProducer strategy to accumulate the ldif-change operations with a single LDIF header
- :return: reference to the response stream if defined in the strategy.
- """
- return self.strategy.get_stream() if self.strategy.can_stream else None
-
- @stream.setter
- def stream(self, value):
- with self.connection_lock:
- if self.strategy.can_stream:
- self.strategy.set_stream(value)
-
- @property
- def usage(self):
- """Usage statistics for the connection.
- :return: Usage object
- """
- if not self._usage:
- return None
- if self.strategy.pooled: # update master connection usage from pooled connections
- self._usage.reset()
- for worker in self.strategy.pool.workers:
- self._usage += worker.connection.usage
- self._usage += self.strategy.pool.terminated_usage
- return self._usage
-
- def __enter__(self):
- with self.connection_lock:
- self._context_state.append((self.bound, self.closed)) # save status out of context as a tuple in a list
- if self.closed:
- self.open()
- if not self.bound:
- self.bind()
-
- return self
-
- # noinspection PyUnusedLocal
- def __exit__(self, exc_type, exc_val, exc_tb):
- with self.connection_lock:
- context_bound, context_closed = self._context_state.pop()
- if (not context_bound and self.bound) or self.stream: # restore status prior to entering context
- try:
- self.unbind()
- except LDAPExceptionError:
- pass
-
- if not context_closed and self.closed:
- self.open()
-
- if exc_type is not None:
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', exc_type, self)
- return False # re-raise LDAPExceptionError
-
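The context-manager protocol implemented above (save the bound/closed state on entry, restore it on exit) is easiest to see from the caller's side. A minimal sketch, assuming a reachable server and valid credentials; ldap.example.com and the admin DN are hypothetical:

from ldap3 import Server, Connection, ALL

server = Server('ldap://ldap.example.com', get_info=ALL)   # hypothetical host

# __enter__ opens and binds if needed; __exit__ restores the state saved on entry,
# unbinding and closing the socket because the connection was closed before the block
with Connection(server, user='cn=admin,dc=example,dc=com', password='secret') as conn:
    conn.search('dc=example,dc=com', '(objectClass=*)', attributes=['cn'])
    print(conn.entries)

print(conn.closed)   # True again, as it was before entering the block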
- def bind(self,
- read_server_info=True,
- controls=None):
- """Bind to ldap Server with the authentication method and the user defined in the connection
-
- :param read_server_info: reads info from server
- :param controls: LDAP controls to send along with the bind operation
- :type controls: list of tuple
- :return: bool
-
- """
- if log_enabled(BASIC):
- log(BASIC, 'start BIND operation via <%s>', self)
- self.last_error = None
- with self.connection_lock:
- if self.lazy and not self._executing_deferred:
- if self.strategy.pooled:
- self.strategy.validate_bind(controls)
- self._deferred_bind = True
- self._bind_controls = controls
- self.bound = True
- if log_enabled(BASIC):
- log(BASIC, 'deferring bind for <%s>', self)
- else:
- self._deferred_bind = False
- self._bind_controls = None
- if self.closed: # try to open connection if closed
- self.open(read_server_info=False)
- if self.authentication == ANONYMOUS:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'performing anonymous BIND for <%s>', self)
- if not self.strategy.pooled:
- request = bind_operation(self.version, self.authentication, self.user, '', auto_encode=self.auto_encode)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'anonymous BIND request <%s> sent via <%s>', bind_request_to_dict(request), self)
- response = self.post_send_single_response(self.send('bindRequest', request, controls))
- else:
- response = self.strategy.validate_bind(controls) # only for REUSABLE
- elif self.authentication == SIMPLE:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'performing simple BIND for <%s>', self)
- if not self.strategy.pooled:
- request = bind_operation(self.version, self.authentication, self.user, self.password, auto_encode=self.auto_encode)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'simple BIND request <%s> sent via <%s>', bind_request_to_dict(request), self)
- response = self.post_send_single_response(self.send('bindRequest', request, controls))
- else:
- response = self.strategy.validate_bind(controls) # only for REUSABLE
- elif self.authentication == SASL:
- if self.sasl_mechanism in SASL_AVAILABLE_MECHANISMS:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'performing SASL BIND for <%s>', self)
- if not self.strategy.pooled:
- response = self.do_sasl_bind(controls)
- else:
- response = self.strategy.validate_bind(controls) # only for REUSABLE
- else:
- self.last_error = 'requested SASL mechanism not supported'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPSASLMechanismNotSupportedError(self.last_error)
- elif self.authentication == NTLM:
- if self.user and self.password and len(self.user.split('\\')) == 2:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'performing NTLM BIND for <%s>', self)
- if not self.strategy.pooled:
- response = self.do_ntlm_bind(controls)
- else:
- response = self.strategy.validate_bind(controls) # only for REUSABLE
- else: # user or password missing
- self.last_error = 'NTLM needs domain\\username and a password'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPUnknownAuthenticationMethodError(self.last_error)
- else:
- self.last_error = 'unknown authentication method'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPUnknownAuthenticationMethodError(self.last_error)
-
- if not self.strategy.sync and not self.strategy.pooled and self.authentication not in (SASL, NTLM): # get response if asynchronous except for SASL and NTLM that return the bind result even for asynchronous strategy
- _, result = self.get_response(response)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'async BIND response id <%s> received via <%s>', result, self)
- elif self.strategy.sync:
- result = self.result
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'BIND response <%s> received via <%s>', result, self)
-                elif self.strategy.pooled or self.authentication in (SASL, NTLM):  # asynchronous SASL and NTLM or reusable strategy get the bind result synchronously
- result = response
- else:
- self.last_error = 'unknown authentication method'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPUnknownAuthenticationMethodError(self.last_error)
-
- if result is None:
- # self.bound = True if self.strategy_type == REUSABLE else False
- self.bound = False
- elif result is True:
- self.bound = True
- elif result is False:
- self.bound = False
- else:
- self.bound = True if result['result'] == RESULT_SUCCESS else False
- if not self.bound and result and result['description'] and not self.last_error:
- self.last_error = result['description']
-
- if read_server_info and self.bound:
- self.refresh_server_info()
- self._entries = []
-
- if log_enabled(BASIC):
- log(BASIC, 'done BIND operation, result <%s>', self.bound)
-
- return self.bound
-
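bind() returns a boolean instead of raising on failure (unless raise_exceptions is set), so callers are expected to inspect result and last_error. A minimal sketch with the same hypothetical server and credentials:

from ldap3 import Server, Connection, SIMPLE

server = Server('ldap://ldap.example.com')                        # hypothetical host
conn = Connection(server, user='cn=admin,dc=example,dc=com',
                  password='secret', authentication=SIMPLE)

if not conn.bind():                        # False when the server refuses the bind
    print('bind failed:', conn.last_error)
    print('server result:', conn.result)  # dict with 'result', 'description', 'message'
else:
    print('bound as', conn.extend.standard.who_am_i())
    conn.unbind()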
- def rebind(self,
- user=None,
- password=None,
- authentication=None,
- sasl_mechanism=None,
- sasl_credentials=None,
- read_server_info=True,
- controls=None
- ):
-
- if log_enabled(BASIC):
- log(BASIC, 'start (RE)BIND operation via <%s>', self)
- self.last_error = None
- with self.connection_lock:
- if user:
- self.user = user
- if password is not None:
- self.password = password
- if not authentication and user:
- self.authentication = SIMPLE
- if authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]:
- self.authentication = authentication
- elif authentication is not None:
- self.last_error = 'unknown authentication method'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPUnknownAuthenticationMethodError(self.last_error)
- if sasl_mechanism:
- self.sasl_mechanism = sasl_mechanism
- if sasl_credentials:
- self.sasl_credentials = sasl_credentials
-
- # if self.authentication == SIMPLE and self.user and self.check_names:
- # self.user = safe_dn(self.user)
- # if log_enabled(EXTENDED):
- # log(EXTENDED, 'user name sanitized to <%s> for rebind via <%s>', self.user, self)
-
- if not self.strategy.pooled:
- try:
- return self.bind(read_server_info, controls)
- except LDAPSocketReceiveError:
- raise LDAPBindError('Unable to rebind as a different user, furthermore the server abruptly closed the connection')
- else:
- self.strategy.pool.rebind_pool()
- return True
-
- def unbind(self,
- controls=None):
- """Unbind the connected user. Unbind implies closing session as per RFC4511 (4.3)
-
-        :param controls: LDAP controls to send along with the unbind operation
-
- """
- if log_enabled(BASIC):
- log(BASIC, 'start UNBIND operation via <%s>', self)
-
- if self.use_referral_cache:
- self.strategy.unbind_referral_cache()
-
- self.last_error = None
- with self.connection_lock:
- if self.lazy and not self._executing_deferred and (self._deferred_bind or self._deferred_open): # _clear deferred status
- self.strategy.close()
- self._deferred_open = False
- self._deferred_bind = False
- self._deferred_start_tls = False
- elif not self.closed:
- request = unbind_operation()
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'UNBIND request sent via <%s>', self)
- self.send('unbindRequest', request, controls)
- self.strategy.close()
-
- if log_enabled(BASIC):
- log(BASIC, 'done UNBIND operation, result <%s>', True)
-
- return True
-
- def search(self,
- search_base,
- search_filter,
- search_scope=SUBTREE,
- dereference_aliases=DEREF_ALWAYS,
- attributes=None,
- size_limit=0,
- time_limit=0,
- types_only=False,
- get_operational_attributes=False,
- controls=None,
- paged_size=None,
- paged_criticality=False,
- paged_cookie=None,
- auto_escape=None):
- """
- Perform an ldap search:
-
-        - If attributes is empty no attribute is returned
-        - If attributes is ALL_ATTRIBUTES all attributes are returned
-        - If paged_size is an int greater than 0 a simple paged search
-          is tried as described in RFC2696 with the specified size
-        - If paged_size is 0 and cookie is present the search is abandoned
-          on the server
-        - Cookie is an opaque string received in the last paged search
-          and must be used on the next paged search request
-        - If lazy == True open and bind will be deferred until another
-          LDAP operation is performed
-        - If missing_attributes == True then an attribute not returned by the server is set to None
-        - If auto_escape is set it overrides the Connection auto_escape
- """
- conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
- if log_enabled(BASIC):
- log(BASIC, 'start SEARCH operation via <%s>', self)
-
- if self.check_names and search_base:
- search_base = safe_dn(search_base)
- if log_enabled(EXTENDED):
- log(EXTENDED, 'search base sanitized to <%s> for SEARCH operation via <%s>', search_base, self)
-
- with self.connection_lock:
- self._fire_deferred()
- if not attributes:
- attributes = [NO_ATTRIBUTES]
- elif attributes == ALL_ATTRIBUTES:
- attributes = [ALL_ATTRIBUTES]
-
- if isinstance(attributes, STRING_TYPES):
- attributes = [attributes]
-
- if get_operational_attributes and isinstance(attributes, list):
- attributes.append(ALL_OPERATIONAL_ATTRIBUTES)
- elif get_operational_attributes and isinstance(attributes, tuple):
- attributes += (ALL_OPERATIONAL_ATTRIBUTES, ) # concatenate tuple
-
- if isinstance(paged_size, int):
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'performing paged search for %d items with cookie <%s> for <%s>', paged_size, escape_bytes(paged_cookie), self)
-
- if controls is None:
- controls = []
- controls.append(paged_search_control(paged_criticality, paged_size, paged_cookie))
-
- if self.server and self.server.schema and self.check_names:
- for attribute_name in attributes:
- if ';' in attribute_name: # remove tags
- attribute_name_to_check = attribute_name.split(';')[0]
- else:
- attribute_name_to_check = attribute_name
- if self.server.schema and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
- raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
-
- request = search_operation(search_base,
- search_filter,
- search_scope,
- dereference_aliases,
- attributes,
- size_limit,
- time_limit,
- types_only,
- self.auto_escape if auto_escape is None else auto_escape,
- self.auto_encode,
- self.server.schema if self.server else None,
- check_names=self.check_names)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'SEARCH request <%s> sent via <%s>', search_request_to_dict(request), self)
- response = self.post_send_search(self.send('searchRequest', request, controls))
- self._entries = []
-
- if isinstance(response, int): # asynchronous strategy
- return_value = response
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'async SEARCH response id <%s> received via <%s>', return_value, self)
- else:
- return_value = True if self.result['type'] == 'searchResDone' and len(response) > 0 else False
- if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
- self.last_error = self.result['description']
-
- if log_enabled(PROTOCOL):
- for entry in response:
- if entry['type'] == 'searchResEntry':
- log(PROTOCOL, 'SEARCH response entry <%s> received via <%s>', entry, self)
- elif entry['type'] == 'searchResRef':
- log(PROTOCOL, 'SEARCH response reference <%s> received via <%s>', entry, self)
-
- if log_enabled(BASIC):
- log(BASIC, 'done SEARCH operation, result <%s>', return_value)
-
- return return_value
-
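Each search call above attaches at most one RFC 2696 paged-results control; looping over the cookie is left to the caller (or to the extend.standard.paged_search helper). A hedged sketch of that loop, with hypothetical base and filter values:

from ldap3 import Server, Connection, SUBTREE

conn = Connection(Server('ldap://ldap.example.com'),              # hypothetical host
                  user='cn=admin,dc=example,dc=com', password='secret', auto_bind=True)

cookie = None
entries = []
while True:
    conn.search('dc=example,dc=com', '(objectClass=person)', search_scope=SUBTREE,
                attributes=['cn', 'mail'], paged_size=100, paged_cookie=cookie)
    entries.extend(conn.entries)
    # the cookie comes back in the Simple Paged Results control (OID 1.2.840.113556.1.4.319)
    cookie = conn.result['controls']['1.2.840.113556.1.4.319']['value']['cookie']
    if not cookie:   # an empty cookie means the server has no more pages
        break
print(len(entries), 'entries collected')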
- def compare(self,
- dn,
- attribute,
- value,
- controls=None):
- """
- Perform a compare operation
- """
- conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
-
- if log_enabled(BASIC):
- log(BASIC, 'start COMPARE operation via <%s>', self)
- self.last_error = None
- if self.check_names:
- dn = safe_dn(dn)
- if log_enabled(EXTENDED):
- log(EXTENDED, 'dn sanitized to <%s> for COMPARE operation via <%s>', dn, self)
-
- if self.server and self.server.schema and self.check_names:
- if ';' in attribute: # remove tags for checking
- attribute_name_to_check = attribute.split(';')[0]
- else:
- attribute_name_to_check = attribute
-
- if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
- raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
-
- if isinstance(value, SEQUENCE_TYPES): # value can't be a sequence
- raise LDAPInvalidValueError('value cannot be a sequence')
-
- with self.connection_lock:
- self._fire_deferred()
- request = compare_operation(dn, attribute, value, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'COMPARE request <%s> sent via <%s>', compare_request_to_dict(request), self)
- response = self.post_send_single_response(self.send('compareRequest', request, controls))
- self._entries = []
- if isinstance(response, int):
- return_value = response
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'async COMPARE response id <%s> received via <%s>', return_value, self)
- else:
- return_value = True if self.result['type'] == 'compareResponse' and self.result['result'] == RESULT_COMPARE_TRUE else False
- if not return_value and self.result['result'] not in [RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE] and not self.last_error:
- self.last_error = self.result['description']
-
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'COMPARE response <%s> received via <%s>', response, self)
-
- if log_enabled(BASIC):
- log(BASIC, 'done COMPARE operation, result <%s>', return_value)
-
- return return_value
-
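compare() collapses the LDAP Compare operation into a boolean that is True only for compareTrue; the raw outcome stays in self.result. A short sketch with a hypothetical entry:

# conn: a bound Connection as in the bind sketch above
if conn.compare('cn=user1,ou=users,dc=example,dc=com', 'sn', 'Young'):
    print('sn is Young')
else:
    # compareFalse and genuine errors both return False, so check the result code
    print(conn.result['description'])   # 'compareFalse', 'noSuchObject', ...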
- def add(self,
- dn,
- object_class=None,
- attributes=None,
- controls=None):
- """
-        Add dn to the DIT; object_class can be None, a class name or a list
-        of class names.
-
- Attributes is a dictionary in the form 'attr': 'val' or 'attr':
- ['val1', 'val2', ...] for multivalued attributes
- """
- conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
- conf_classes_excluded_from_check = [v.lower() for v in get_config_parameter('CLASSES_EXCLUDED_FROM_CHECK')]
- if log_enabled(BASIC):
- log(BASIC, 'start ADD operation via <%s>', self)
- self.last_error = None
- if self.check_names:
- dn = safe_dn(dn)
- if log_enabled(EXTENDED):
- log(EXTENDED, 'dn sanitized to <%s> for ADD operation via <%s>', dn, self)
-
- with self.connection_lock:
- self._fire_deferred()
- attr_object_class = []
- if object_class is None:
- parm_object_class = []
- else:
- parm_object_class = list(object_class) if isinstance(object_class, SEQUENCE_TYPES) else [object_class]
-
- object_class_attr_name = ''
- if attributes:
- for attr in attributes:
- if attr.lower() == 'objectclass':
- object_class_attr_name = attr
- attr_object_class = list(attributes[object_class_attr_name]) if isinstance(attributes[object_class_attr_name], SEQUENCE_TYPES) else [attributes[object_class_attr_name]]
- break
- else:
- attributes = dict()
-
- if not object_class_attr_name:
- object_class_attr_name = 'objectClass'
-
- attr_object_class = [to_unicode(object_class) for object_class in attr_object_class] # converts objectclass to unicode in case of bytes value
- attributes[object_class_attr_name] = reduce(lambda x, y: x + [y] if y not in x else x, parm_object_class + attr_object_class, []) # remove duplicate ObjectClasses
-
- if not attributes[object_class_attr_name]:
- self.last_error = 'objectClass attribute is mandatory'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPObjectClassError(self.last_error)
-
- if self.server and self.server.schema and self.check_names:
- for object_class_name in attributes[object_class_attr_name]:
- if object_class_name.lower() not in conf_classes_excluded_from_check and object_class_name not in self.server.schema.object_classes:
- raise LDAPObjectClassError('invalid object class ' + str(object_class_name))
-
- for attribute_name in attributes:
- if ';' in attribute_name: # remove tags for checking
- attribute_name_to_check = attribute_name.split(';')[0]
- else:
- attribute_name_to_check = attribute_name
-
- if attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
- raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
-
- request = add_operation(dn, attributes, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'ADD request <%s> sent via <%s>', add_request_to_dict(request), self)
- response = self.post_send_single_response(self.send('addRequest', request, controls))
- self._entries = []
-
- if isinstance(response, STRING_TYPES + (int, )):
- return_value = response
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'async ADD response id <%s> received via <%s>', return_value, self)
- else:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'ADD response <%s> received via <%s>', response, self)
- return_value = True if self.result['type'] == 'addResponse' and self.result['result'] == RESULT_SUCCESS else False
- if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
- self.last_error = self.result['description']
-
- if log_enabled(BASIC):
- log(BASIC, 'done ADD operation, result <%s>', return_value)
-
- return return_value
-
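As the docstring above notes, object_class and any objectClass key inside attributes are merged and de-duplicated before the request is built. A hedged sketch with hypothetical DNs:

from ldap3 import Server, Connection

conn = Connection(Server('ldap://ldap.example.com'),              # hypothetical host
                  user='cn=admin,dc=example,dc=com', password='secret', auto_bind=True)

# object_class can be a single name or a list; attribute values can be scalars or lists
ok = conn.add('cn=user1,ou=users,dc=example,dc=com',
              object_class=['inetOrgPerson', 'organizationalPerson', 'person'],
              attributes={'sn': 'Young', 'givenName': 'Beatrix',
                          'mail': ['b.young@example.com']})
if not ok:
    print('add failed:', conn.result['description'])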
- def delete(self,
- dn,
- controls=None):
- """
- Delete the entry identified by the DN from the DIB.
- """
- if log_enabled(BASIC):
- log(BASIC, 'start DELETE operation via <%s>', self)
- self.last_error = None
- if self.check_names:
- dn = safe_dn(dn)
- if log_enabled(EXTENDED):
- log(EXTENDED, 'dn sanitized to <%s> for DELETE operation via <%s>', dn, self)
-
- with self.connection_lock:
- self._fire_deferred()
- if self.read_only:
- self.last_error = 'connection is read-only'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPConnectionIsReadOnlyError(self.last_error)
-
- request = delete_operation(dn)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'DELETE request <%s> sent via <%s>', delete_request_to_dict(request), self)
- response = self.post_send_single_response(self.send('delRequest', request, controls))
- self._entries = []
-
- if isinstance(response, STRING_TYPES + (int, )):
- return_value = response
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'async DELETE response id <%s> received via <%s>', return_value, self)
- else:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'DELETE response <%s> received via <%s>', response, self)
- return_value = True if self.result['type'] == 'delResponse' and self.result['result'] == RESULT_SUCCESS else False
- if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
- self.last_error = self.result['description']
-
- if log_enabled(BASIC):
- log(BASIC, 'done DELETE operation, result <%s>', return_value)
-
- return return_value
-
- def modify(self,
- dn,
- changes,
- controls=None):
- """
- Modify attributes of entry
-
-        - changes is a dictionary in the form {'attribute1': change, 'attribute2': [change, change, ...], ...}
- - change is (operation, [value1, value2, ...])
- - operation is 0 (MODIFY_ADD), 1 (MODIFY_DELETE), 2 (MODIFY_REPLACE), 3 (MODIFY_INCREMENT)
- """
- conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
-
- if log_enabled(BASIC):
- log(BASIC, 'start MODIFY operation via <%s>', self)
- self.last_error = None
- if self.check_names:
- dn = safe_dn(dn)
- if log_enabled(EXTENDED):
- log(EXTENDED, 'dn sanitized to <%s> for MODIFY operation via <%s>', dn, self)
-
- with self.connection_lock:
- self._fire_deferred()
- if self.read_only:
- self.last_error = 'connection is read-only'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPConnectionIsReadOnlyError(self.last_error)
-
- if not isinstance(changes, dict):
- self.last_error = 'changes must be a dictionary'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPChangeError(self.last_error)
-
- if not changes:
- self.last_error = 'no changes in modify request'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPChangeError(self.last_error)
-
- for attribute_name in changes:
- if self.server and self.server.schema and self.check_names:
- if ';' in attribute_name: # remove tags for checking
- attribute_name_to_check = attribute_name.split(';')[0]
- else:
- attribute_name_to_check = attribute_name
-
- if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
- raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
- change = changes[attribute_name]
- if isinstance(change, SEQUENCE_TYPES) and change[0] in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]:
- if len(change) != 2:
- self.last_error = 'malformed change'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPChangeError(self.last_error)
-
- changes[attribute_name] = [change] # insert change in a tuple
- else:
- for change_operation in change:
- if len(change_operation) != 2 or change_operation[0] not in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]:
- self.last_error = 'invalid change list'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPChangeError(self.last_error)
- request = modify_operation(dn, changes, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'MODIFY request <%s> sent via <%s>', modify_request_to_dict(request), self)
- response = self.post_send_single_response(self.send('modifyRequest', request, controls))
- self._entries = []
-
- if isinstance(response, STRING_TYPES + (int, )):
- return_value = response
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'async MODIFY response id <%s> received via <%s>', return_value, self)
- else:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'MODIFY response <%s> received via <%s>', response, self)
- return_value = True if self.result['type'] == 'modifyResponse' and self.result['result'] == RESULT_SUCCESS else False
- if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
- self.last_error = self.result['description']
-
- if log_enabled(BASIC):
- log(BASIC, 'done MODIFY operation, result <%s>', return_value)
-
- return return_value
-
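Each value in the changes dictionary is a single (operation, values) tuple or a list of them, exactly as validated above. A hedged sketch reusing the conn and the hypothetical entry from the add sketch:

from ldap3 import MODIFY_REPLACE, MODIFY_ADD, MODIFY_DELETE

changes = {
    'givenName': [(MODIFY_REPLACE, ['Beatrix Anne'])],            # replace all current values
    'mail': [(MODIFY_ADD, ['beatrix@example.com']),               # several operations on one attribute
             (MODIFY_DELETE, ['b.young@example.com'])],
}
if not conn.modify('cn=user1,ou=users,dc=example,dc=com', changes):
    print('modify failed:', conn.result['description'])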
- def modify_dn(self,
- dn,
- relative_dn,
- delete_old_dn=True,
- new_superior=None,
- controls=None):
- """
-        Modify the DN of the entry or perform a move of the entry in the
-        DIT.
- """
- if log_enabled(BASIC):
- log(BASIC, 'start MODIFY DN operation via <%s>', self)
- self.last_error = None
- if self.check_names:
- dn = safe_dn(dn)
- if log_enabled(EXTENDED):
- log(EXTENDED, 'dn sanitized to <%s> for MODIFY DN operation via <%s>', dn, self)
- relative_dn = safe_dn(relative_dn)
- if log_enabled(EXTENDED):
- log(EXTENDED, 'relative dn sanitized to <%s> for MODIFY DN operation via <%s>', relative_dn, self)
-
- with self.connection_lock:
- self._fire_deferred()
- if self.read_only:
- self.last_error = 'connection is read-only'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPConnectionIsReadOnlyError(self.last_error)
-
- if new_superior and not dn.startswith(relative_dn): # as per RFC4511 (4.9)
-                self.last_error = 'DN cannot change while performing a move'
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', self.last_error, self)
- raise LDAPChangeError(self.last_error)
-
- request = modify_dn_operation(dn, relative_dn, delete_old_dn, new_superior)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'MODIFY DN request <%s> sent via <%s>', modify_dn_request_to_dict(request), self)
- response = self.post_send_single_response(self.send('modDNRequest', request, controls))
- self._entries = []
-
- if isinstance(response, STRING_TYPES + (int, )):
- return_value = response
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'async MODIFY DN response id <%s> received via <%s>', return_value, self)
- else:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'MODIFY DN response <%s> received via <%s>', response, self)
- return_value = True if self.result['type'] == 'modDNResponse' and self.result['result'] == RESULT_SUCCESS else False
- if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
- self.last_error = self.result['description']
-
- if log_enabled(BASIC):
- log(BASIC, 'done MODIFY DN operation, result <%s>', return_value)
-
- return return_value
-
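modify_dn() covers both a plain rename (a new RDN under the same parent) and a move via new_superior, but not both at once, as enforced above. A hedged sketch with hypothetical DNs, reusing conn from the earlier sketches:

# rename: the entry keeps its parent, only the RDN changes
conn.modify_dn('cn=user1,ou=users,dc=example,dc=com', 'cn=user2')

# move: the RDN must stay the same, only the parent changes
conn.modify_dn('cn=user2,ou=users,dc=example,dc=com', 'cn=user2',
               new_superior='ou=admins,dc=example,dc=com')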
- def abandon(self,
- message_id,
- controls=None):
- """
- Abandon the operation indicated by message_id
- """
- if log_enabled(BASIC):
- log(BASIC, 'start ABANDON operation via <%s>', self)
- self.last_error = None
- with self.connection_lock:
- self._fire_deferred()
- return_value = False
- if self.strategy._outstanding or message_id == 0:
-                # only outstanding operations can be abandoned; Abandon, Bind and Unbind requests can never be abandoned,
-                # message ID 0 is invalid and is used as a "ping" to keep the connection alive
- if (self.strategy._outstanding and message_id in self.strategy._outstanding and self.strategy._outstanding[message_id]['type'] not in ['abandonRequest', 'bindRequest', 'unbindRequest']) or message_id == 0:
- request = abandon_operation(message_id)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'ABANDON request: <%s> sent via <%s>', abandon_request_to_dict(request), self)
- self.send('abandonRequest', request, controls)
- self.result = None
- self.response = None
- self._entries = []
- return_value = True
- else:
- if log_enabled(ERROR):
- log(ERROR, 'cannot abandon a Bind, an Unbind or an Abandon operation or message ID %s not found via <%s>', str(message_id), self)
-
- if log_enabled(BASIC):
- log(BASIC, 'done ABANDON operation, result <%s>', return_value)
-
- return return_value
-
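abandon() is only meaningful for operations that are still outstanding, which in practice means an asynchronous strategy where each operation returns a message id; message id 0 is the keep-alive "ping" described in the comment above. A hedged sketch with a hypothetical async connection:

from ldap3 import Server, Connection, ASYNC

conn = Connection(Server('ldap://ldap.example.com'), client_strategy=ASYNC,   # hypothetical host
                  user='cn=admin,dc=example,dc=com', password='secret', auto_bind=True)

msg_id = conn.search('dc=example,dc=com', '(objectClass=*)')   # async: returns a message id, not a bool
conn.abandon(msg_id)   # ask the server to drop the outstanding search
conn.abandon(0)        # message id 0: no-op "ping" that just keeps the connection alive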
- def extended(self,
- request_name,
- request_value=None,
- controls=None,
- no_encode=None):
- """
- Performs an extended operation
- """
- if log_enabled(BASIC):
- log(BASIC, 'start EXTENDED operation via <%s>', self)
- self.last_error = None
- with self.connection_lock:
- self._fire_deferred()
- request = extended_operation(request_name, request_value, no_encode=no_encode)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'EXTENDED request <%s> sent via <%s>', extended_request_to_dict(request), self)
- response = self.post_send_single_response(self.send('extendedReq', request, controls))
- self._entries = []
- if isinstance(response, int):
- return_value = response
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'async EXTENDED response id <%s> received via <%s>', return_value, self)
- else:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'EXTENDED response <%s> received via <%s>', response, self)
- return_value = True if self.result['type'] == 'extendedResp' and self.result['result'] == RESULT_SUCCESS else False
- if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
- self.last_error = self.result['description']
-
- if log_enabled(BASIC):
- log(BASIC, 'done EXTENDED operation, result <%s>', return_value)
-
- return return_value
-
-    def start_tls(self, read_server_info=True):  # as per RFC4511. Removal of TLS is defined as MAY in RFC4511 so the client can't implement a generic stop_tls method
-
- if log_enabled(BASIC):
- log(BASIC, 'start START TLS operation via <%s>', self)
-
- with self.connection_lock:
- return_value = False
- if not self.server.tls:
- self.server.tls = Tls()
-
- if self.lazy and not self._executing_deferred:
- self._deferred_start_tls = True
- self.tls_started = True
- return_value = True
- if log_enabled(BASIC):
- log(BASIC, 'deferring START TLS for <%s>', self)
- else:
- self._deferred_start_tls = False
- if self.server.tls.start_tls(self) and self.strategy.sync: # for asynchronous connections _start_tls is run by the strategy
- if read_server_info:
- self.refresh_server_info() # refresh server info as per RFC4515 (3.1.5)
- return_value = True
- elif not self.strategy.sync:
- return_value = True
-
- if log_enabled(BASIC):
- log(BASIC, 'done START TLS operation, result <%s>', return_value)
-
- return return_value
-
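start_tls() upgrades an already open socket, so the usual patterns are an explicit open/start_tls/bind sequence or letting auto_bind drive it. A hedged sketch with the hypothetical server used above:

import ssl
from ldap3 import Server, Connection, Tls, AUTO_BIND_TLS_BEFORE_BIND

tls = Tls(validate=ssl.CERT_REQUIRED)                     # require a verifiable server certificate
server = Server('ldap://ldap.example.com', tls=tls)       # hypothetical host

# explicit sequence: upgrade the plain LDAP socket before credentials are sent
conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret')
conn.open()
conn.start_tls()
conn.bind()

# equivalent shortcut: let the constructor do open + start_tls + bind
conn2 = Connection(server, user='cn=admin,dc=example,dc=com', password='secret',
                   auto_bind=AUTO_BIND_TLS_BEFORE_BIND)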
- def do_sasl_bind(self,
- controls):
- if log_enabled(BASIC):
- log(BASIC, 'start SASL BIND operation via <%s>', self)
- self.last_error = None
- with self.connection_lock:
- result = None
-
- if not self.sasl_in_progress:
- self.sasl_in_progress = True
- try:
- if self.sasl_mechanism == EXTERNAL:
- result = sasl_external(self, controls)
- elif self.sasl_mechanism == DIGEST_MD5:
- result = sasl_digest_md5(self, controls)
- elif self.sasl_mechanism == GSSAPI:
- from ..protocol.sasl.kerberos import sasl_gssapi # needs the gssapi package
- result = sasl_gssapi(self, controls)
- elif self.sasl_mechanism == 'PLAIN':
- result = sasl_plain(self, controls)
- finally:
- self.sasl_in_progress = False
-
- if log_enabled(BASIC):
- log(BASIC, 'done SASL BIND operation, result <%s>', result)
-
- return result
-
- def do_ntlm_bind(self,
- controls):
- if log_enabled(BASIC):
- log(BASIC, 'start NTLM BIND operation via <%s>', self)
- self.last_error = None
- with self.connection_lock:
- result = None
- if not self.sasl_in_progress:
-                self.sasl_in_progress = True  # NTLM is handled in the same way as SASL authentication
- # additional import for NTLM
- from ..utils.ntlm import NtlmClient
- domain_name, user_name = self.user.split('\\', 1)
- ntlm_client = NtlmClient(user_name=user_name, domain=domain_name, password=self.password)
-
- # as per https://msdn.microsoft.com/en-us/library/cc223501.aspx
- # send a sicilyPackageDiscovery request (in the bindRequest)
- request = bind_operation(self.version, 'SICILY_PACKAGE_DISCOVERY', ntlm_client)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'NTLM SICILY PACKAGE DISCOVERY request sent via <%s>', self)
- response = self.post_send_single_response(self.send('bindRequest', request, controls))
- if not self.strategy.sync:
- _, result = self.get_response(response)
- else:
- result = response[0]
- if 'server_creds' in result:
- sicily_packages = result['server_creds'].decode('ascii').split(';')
- if 'NTLM' in sicily_packages: # NTLM available on server
- request = bind_operation(self.version, 'SICILY_NEGOTIATE_NTLM', ntlm_client)
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'NTLM SICILY NEGOTIATE request sent via <%s>', self)
- response = self.post_send_single_response(self.send('bindRequest', request, controls))
- if not self.strategy.sync:
- _, result = self.get_response(response)
- else:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'NTLM SICILY NEGOTIATE response <%s> received via <%s>', response[0], self)
- result = response[0]
-
- if result['result'] == RESULT_SUCCESS:
- request = bind_operation(self.version, 'SICILY_RESPONSE_NTLM', ntlm_client, result['server_creds'])
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'NTLM SICILY RESPONSE NTLM request sent via <%s>', self)
- response = self.post_send_single_response(self.send('bindRequest', request, controls))
- if not self.strategy.sync:
- _, result = self.get_response(response)
- else:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'NTLM BIND response <%s> received via <%s>', response[0], self)
- result = response[0]
- else:
- result = None
- self.sasl_in_progress = False
-
- if log_enabled(BASIC):
- log(BASIC, 'done SASL NTLM operation, result <%s>', result)
-
- return result
-
- def refresh_server_info(self):
- # if self.strategy.no_real_dsa: # do not refresh for mock strategies
- # return
-
- if not self.strategy.pooled:
- with self.connection_lock:
- if not self.closed:
- if log_enabled(BASIC):
- log(BASIC, 'refreshing server info for <%s>', self)
- previous_response = self.response
- previous_result = self.result
- previous_entries = self._entries
- self.server.get_info_from_server(self)
- self.response = previous_response
- self.result = previous_result
- self._entries = previous_entries
- else:
- if log_enabled(BASIC):
- log(BASIC, 'refreshing server info from pool for <%s>', self)
- self.strategy.pool.get_info_from_server()
-
- def response_to_ldif(self,
- search_result=None,
- all_base64=False,
- line_separator=None,
- sort_order=None,
- stream=None):
- with self.connection_lock:
- if search_result is None:
- search_result = self.response
-
- if isinstance(search_result, SEQUENCE_TYPES):
- ldif_lines = operation_to_ldif('searchResponse', search_result, all_base64, sort_order=sort_order)
- ldif_lines = add_ldif_header(ldif_lines)
- line_separator = line_separator or linesep
- ldif_output = line_separator.join(ldif_lines)
- if stream:
- if stream.tell() == 0:
- header = add_ldif_header(['-'])[0]
- stream.write(prepare_for_stream(header + line_separator + line_separator))
- stream.write(prepare_for_stream(ldif_output + line_separator + line_separator))
- if log_enabled(BASIC):
- log(BASIC, 'building LDIF output <%s> for <%s>', ldif_output, self)
- return ldif_output
-
- return None
-
- def response_to_json(self,
- raw=False,
- search_result=None,
- indent=4,
- sort=True,
- stream=None,
- checked_attributes=True,
- include_empty=True):
-
- with self.connection_lock:
- if search_result is None:
- search_result = self.response
-
- if isinstance(search_result, SEQUENCE_TYPES):
- json_dict = dict()
- json_dict['entries'] = []
-
- for response in search_result:
- if response['type'] == 'searchResEntry':
- entry = dict()
-
- entry['dn'] = response['dn']
- if checked_attributes:
- if not include_empty:
- # needed for python 2.6 compatibility
- entry['attributes'] = dict((key, response['attributes'][key]) for key in response['attributes'] if response['attributes'][key])
- else:
- entry['attributes'] = dict(response['attributes'])
- if raw:
- if not include_empty:
- # needed for python 2.6 compatibility
-                                entry['raw'] = dict((key, response['raw_attributes'][key]) for key in response['raw_attributes'] if response['raw_attributes'][key])
- else:
- entry['raw'] = dict(response['raw_attributes'])
- json_dict['entries'].append(entry)
-
- if str is bytes: # Python 2
- check_json_dict(json_dict)
-
- json_output = json.dumps(json_dict, ensure_ascii=True, sort_keys=sort, indent=indent, check_circular=True, default=format_json, separators=(',', ': '))
-
- if log_enabled(BASIC):
- log(BASIC, 'building JSON output <%s> for <%s>', json_output, self)
- if stream:
- stream.write(json_output)
-
- return json_output
-
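response_to_json() and response_to_ldif() default to the last search response held on the connection, so they are normally called right after a synchronous search. A hedged sketch, reusing the bound conn from the earlier sketches:

# conn: a bound Connection as in the earlier sketches
conn.search('dc=example,dc=com', '(objectClass=person)', attributes=['cn', 'mail'])

print(conn.response_to_json(raw=True))            # JSON string with dn, attributes and raw values
print(conn.response_to_ldif())                    # the same entries as an LDIF content block
conn.response_to_file('people.json', raw=True)    # writes the JSON output to a file path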
- def response_to_file(self,
- target,
- raw=False,
- indent=4,
- sort=True):
- with self.connection_lock:
- if self.response:
- if isinstance(target, STRING_TYPES):
- target = open(target, 'w+')
-
- if log_enabled(BASIC):
- log(BASIC, 'writing response to file for <%s>', self)
-
- target.writelines(self.response_to_json(raw=raw, indent=indent, sort=sort))
- target.close()
-
- def _fire_deferred(self, read_info=True):
- with self.connection_lock:
- if self.lazy and not self._executing_deferred:
- self._executing_deferred = True
-
- if log_enabled(BASIC):
- log(BASIC, 'executing deferred (open: %s, start_tls: %s, bind: %s) for <%s>', self._deferred_open, self._deferred_start_tls, self._deferred_bind, self)
- try:
- if self._deferred_open:
- self.open(read_server_info=False)
- if self._deferred_start_tls:
- self.start_tls(read_server_info=False)
- if self._deferred_bind:
- self.bind(read_server_info=False, controls=self._bind_controls)
- if read_info:
- self.refresh_server_info()
- except LDAPExceptionError as e:
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', e, self)
- raise # re-raise LDAPExceptionError
- finally:
- self._executing_deferred = False
-
- @property
- def entries(self):
- if self.response:
- if not self._entries:
- self._entries = self._get_entries(self.response)
- return self._entries
-
- def _get_entries(self, search_response):
- with self.connection_lock:
- from .. import ObjectDef, Reader
-
-            # build a table of ObjectDefs, grouping the entries found in search_response by their attribute set; a subset will be included in its superset
- attr_sets = []
- for response in search_response:
- if response['type'] == 'searchResEntry':
- resp_attr_set = set(response['attributes'].keys())
- if resp_attr_set not in attr_sets:
- attr_sets.append(resp_attr_set)
- attr_sets.sort(key=lambda x: -len(x)) # sorts the list in descending length order
- unique_attr_sets = []
- for attr_set in attr_sets:
- for unique_set in unique_attr_sets:
- if unique_set >= attr_set: # checks if unique set is a superset of attr_set
- break
- else: # the attr_set is not a subset of any element in unique_attr_sets
- unique_attr_sets.append(attr_set)
- object_defs = []
- for attr_set in unique_attr_sets:
- object_def = ObjectDef(schema=self.server.schema)
-                object_def += list(attr_set)  # converts the set into a list to be added to the object definition
- object_defs.append((attr_set,
- object_def,
- Reader(self, object_def, self.request['base'], self.request['filter'], attributes=attr_set) if self.strategy.sync else Reader(self, object_def, '', '', attributes=attr_set))
- ) # objects_defs contains a tuple with the set, the ObjectDef and a cursor
-
- entries = []
- for response in search_response:
- if response['type'] == 'searchResEntry':
- resp_attr_set = set(response['attributes'].keys())
- for object_def in object_defs:
- if resp_attr_set <= object_def[0]: # finds the ObjectDef for the attribute set of this entry
- entry = object_def[2]._create_entry(response)
- entries.append(entry)
- break
- else:
- if log_enabled(ERROR):
- log(ERROR, 'attribute set not found for %s in <%s>', resp_attr_set, self)
- raise LDAPObjectError('attribute set not found for ' + str(resp_attr_set))
-
- return entries
+"""
+"""
+
+# Created on 2014.05.31
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+from copy import deepcopy
+from os import linesep
+from threading import RLock, Lock
+from functools import reduce
+import json
+
+from .. import ANONYMOUS, SIMPLE, SASL, MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, get_config_parameter, DEREF_ALWAYS, \
+ SUBTREE, ASYNC, SYNC, NO_ATTRIBUTES, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, MODIFY_INCREMENT, LDIF, ASYNC_STREAM, \
+ RESTARTABLE, ROUND_ROBIN, REUSABLE, AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_TLS_BEFORE_BIND,\
+ AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_NO_TLS, STRING_TYPES, SEQUENCE_TYPES, MOCK_SYNC, MOCK_ASYNC, NTLM, EXTERNAL,\
+ DIGEST_MD5, GSSAPI, PLAIN
+
+from .results import RESULT_SUCCESS, RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE
+from ..extend import ExtendedOperationsRoot
+from .pooling import ServerPool
+from .server import Server
+from ..operation.abandon import abandon_operation, abandon_request_to_dict
+from ..operation.add import add_operation, add_request_to_dict
+from ..operation.bind import bind_operation, bind_request_to_dict
+from ..operation.compare import compare_operation, compare_request_to_dict
+from ..operation.delete import delete_operation, delete_request_to_dict
+from ..operation.extended import extended_operation, extended_request_to_dict
+from ..operation.modify import modify_operation, modify_request_to_dict
+from ..operation.modifyDn import modify_dn_operation, modify_dn_request_to_dict
+from ..operation.search import search_operation, search_request_to_dict
+from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
+from ..protocol.sasl.digestMd5 import sasl_digest_md5
+from ..protocol.sasl.external import sasl_external
+from ..protocol.sasl.plain import sasl_plain
+from ..strategy.sync import SyncStrategy
+from ..strategy.mockAsync import MockAsyncStrategy
+from ..strategy.asynchronous import AsyncStrategy
+from ..strategy.reusable import ReusableStrategy
+from ..strategy.restartable import RestartableStrategy
+from ..strategy.ldifProducer import LdifProducerStrategy
+from ..strategy.mockSync import MockSyncStrategy
+from ..strategy.asyncStream import AsyncStreamStrategy
+from ..operation.unbind import unbind_operation
+from ..protocol.rfc2696 import paged_search_control
+from .usage import ConnectionUsage
+from .tls import Tls
+from .exceptions import LDAPUnknownStrategyError, LDAPBindError, LDAPUnknownAuthenticationMethodError, \
+ LDAPSASLMechanismNotSupportedError, LDAPObjectClassError, LDAPConnectionIsReadOnlyError, LDAPChangeError, LDAPExceptionError, \
+ LDAPObjectError, LDAPSocketReceiveError, LDAPAttributeError, LDAPInvalidValueError, LDAPConfigurationError, \
+ LDAPInvalidPortError
+
+from ..utils.conv import escape_bytes, prepare_for_stream, check_json_dict, format_json, to_unicode
+from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, EXTENDED, get_library_log_hide_sensitive_data
+from ..utils.dn import safe_dn
+from ..utils.port_validators import check_port_and_port_list
+
+
+SASL_AVAILABLE_MECHANISMS = [EXTERNAL,
+ DIGEST_MD5,
+ GSSAPI,
+ PLAIN]
+
+CLIENT_STRATEGIES = [SYNC,
+ ASYNC,
+ LDIF,
+ RESTARTABLE,
+ REUSABLE,
+ MOCK_SYNC,
+ MOCK_ASYNC,
+ ASYNC_STREAM]
+
+
+def _format_socket_endpoint(endpoint):
+ if endpoint and len(endpoint) == 2: # IPv4
+ return str(endpoint[0]) + ':' + str(endpoint[1])
+ elif endpoint and len(endpoint) == 4: # IPv6
+ return '[' + str(endpoint[0]) + ']:' + str(endpoint[1])
+
+ try:
+ return str(endpoint)
+ except Exception:
+ return '?'
+
+
+def _format_socket_endpoints(sock):
+ if sock:
+ try:
+ local = sock.getsockname()
+ except Exception:
+ local = (None, None, None, None)
+ try:
+ remote = sock.getpeername()
+ except Exception:
+ remote = (None, None, None, None)
+
+ return '<local: ' + _format_socket_endpoint(local) + ' - remote: ' + _format_socket_endpoint(remote) + '>'
+ return '<no socket>'
+
+
+# noinspection PyProtectedMember
+class Connection(object):
+ """Main ldap connection class.
+
+    Controls, if used, must be a list of tuples. Each tuple must have 3
+    elements: the control OID, a boolean indicating whether the control is
+    critical, and a value.
+
+    If the boolean is set to True the server must honor the control or
+    refuse the operation.
+
+    Mixing of controls must be defined in the controls specification (as per
+    RFC 4511).
+
+ :param server: the Server object to connect to
+ :type server: Server, str
+ :param user: the user name for simple authentication
+ :type user: str
+ :param password: the password for simple authentication
+ :type password: str
+ :param auto_bind: specify if the bind will be performed automatically when defining the Connection object
+ :type auto_bind: int, can be one of AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_BEFORE_BIND, AUTO_BIND_TLS_AFTER_BIND as specified in ldap3
+ :param version: LDAP version, default to 3
+ :type version: int
+ :param authentication: type of authentication
+ :type authentication: int, can be one of AUTH_ANONYMOUS, AUTH_SIMPLE or AUTH_SASL, as specified in ldap3
+ :param client_strategy: communication strategy used in the Connection
+ :type client_strategy: can be one of STRATEGY_SYNC, STRATEGY_ASYNC_THREADED, STRATEGY_LDIF_PRODUCER, STRATEGY_SYNC_RESTARTABLE, STRATEGY_REUSABLE_THREADED as specified in ldap3
+ :param auto_referrals: specify if the connection object must automatically follow referrals
+ :type auto_referrals: bool
+ :param sasl_mechanism: mechanism for SASL authentication, can be one of 'EXTERNAL', 'DIGEST-MD5', 'GSSAPI', 'PLAIN'
+ :type sasl_mechanism: str
+ :param sasl_credentials: credentials for SASL mechanism
+ :type sasl_credentials: tuple
+ :param check_names: if True the library will check names of attributes and object classes against the schema. Also values found in entries will be formatted as indicated by the schema
+ :type check_names: bool
+ :param collect_usage: collect usage metrics in the usage attribute
+ :type collect_usage: bool
+ :param read_only: disable operations that modify data in the LDAP server
+ :type read_only: bool
+ :param lazy: open and bind the connection only when an actual operation is performed
+ :type lazy: bool
+ :param raise_exceptions: raise exceptions when operations are not successful, if False operations return False if not successful but not raise exceptions
+ :type raise_exceptions: bool
+ :param pool_name: pool name for pooled strategies
+ :type pool_name: str
+ :param pool_size: pool size for pooled strategies
+ :type pool_size: int
+ :param pool_lifetime: pool lifetime for pooled strategies
+ :type pool_lifetime: int
+ :param cred_store: credential store for gssapi
+ :type cred_store: dict
+ :param use_referral_cache: keep referral connections open and reuse them
+ :type use_referral_cache: bool
+ :param auto_escape: automatic escaping of filter values
+ :type auto_escape: bool
+ :param auto_encode: automatic encoding of attribute values
+ :type auto_encode: bool
+ :param source_address: the ip address or hostname to use as the source when opening the connection to the server
+ :type source_address: str
+ :param source_port: the source port to use when opening the connection to the server. Cannot be specified with source_port_list
+ :type source_port: int
+ :param source_port_list: a list of source ports to choose from when opening the connection to the server. Cannot be specified with source_port
+ :type source_port_list: list
+ """
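The three-element control tuple described in the docstring above is passed through the controls argument of any operation. A hedged sketch using the Microsoft tree-delete control (OID 1.2.840.113556.1.4.805) against a hypothetical directory entry:

from ldap3 import Server, Connection

conn = Connection(Server('ldap://ad.example.com'),                # hypothetical host
                  user='cn=Administrator,cn=Users,dc=example,dc=com',
                  password='secret', auto_bind=True)

# (OID, criticality, value): criticality=True means the server must honor it or refuse the delete
tree_delete = ('1.2.840.113556.1.4.805', True, None)
conn.delete('ou=obsolete,dc=example,dc=com', controls=[tree_delete])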
+
+ def __init__(self,
+ server,
+ user=None,
+ password=None,
+ auto_bind=AUTO_BIND_DEFAULT,
+ version=3,
+ authentication=None,
+ client_strategy=SYNC,
+ auto_referrals=True,
+ auto_range=True,
+ sasl_mechanism=None,
+ sasl_credentials=None,
+ check_names=True,
+ collect_usage=False,
+ read_only=False,
+ lazy=False,
+ raise_exceptions=False,
+ pool_name=None,
+ pool_size=None,
+ pool_lifetime=None,
+ cred_store=None,
+ fast_decoder=True,
+ receive_timeout=None,
+ return_empty_attributes=True,
+ use_referral_cache=False,
+ auto_escape=True,
+ auto_encode=True,
+ pool_keepalive=None,
+ source_address=None,
+ source_port=None,
+ source_port_list=None):
+
+ conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
+ self.connection_lock = RLock() # re-entrant lock to ensure that operations in the Connection object are executed atomically in the same thread
+ with self.connection_lock:
+ if client_strategy not in CLIENT_STRATEGIES:
+ self.last_error = 'unknown client connection strategy'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPUnknownStrategyError(self.last_error)
+
+ self.strategy_type = client_strategy
+ self.user = user
+ self.password = password
+
+ if not authentication and self.user:
+ self.authentication = SIMPLE
+ elif not authentication:
+ self.authentication = ANONYMOUS
+ elif authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]:
+ self.authentication = authentication
+ else:
+ self.last_error = 'unknown authentication method'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPUnknownAuthenticationMethodError(self.last_error)
+
+ self.version = version
+ self.auto_referrals = True if auto_referrals else False
+ self.request = None
+ self.response = None
+ self.result = None
+ self.bound = False
+ self.listening = False
+ self.closed = True
+ self.last_error = None
+ if auto_bind is False: # compatibility with older version where auto_bind was a boolean
+ self.auto_bind = AUTO_BIND_DEFAULT
+ elif auto_bind is True:
+ self.auto_bind = AUTO_BIND_NO_TLS
+ else:
+ self.auto_bind = auto_bind
+ self.sasl_mechanism = sasl_mechanism
+ self.sasl_credentials = sasl_credentials
+ self._usage = ConnectionUsage() if collect_usage else None
+ self.socket = None
+ self.tls_started = False
+ self.sasl_in_progress = False
+ self.read_only = read_only
+ self._context_state = []
+ self._deferred_open = False
+ self._deferred_bind = False
+ self._deferred_start_tls = False
+ self._bind_controls = None
+ self._executing_deferred = False
+ self.lazy = lazy
+ self.pool_name = pool_name if pool_name else conf_default_pool_name
+ self.pool_size = pool_size
+ self.cred_store = cred_store
+ self.pool_lifetime = pool_lifetime
+ self.pool_keepalive = pool_keepalive
+ self.starting_tls = False
+ self.check_names = check_names
+ self.raise_exceptions = raise_exceptions
+ self.auto_range = True if auto_range else False
+ self.extend = ExtendedOperationsRoot(self)
+ self._entries = []
+ self.fast_decoder = fast_decoder
+ self.receive_timeout = receive_timeout
+ self.empty_attributes = return_empty_attributes
+ self.use_referral_cache = use_referral_cache
+ self.auto_escape = auto_escape
+ self.auto_encode = auto_encode
+
+ port_err = check_port_and_port_list(source_port, source_port_list)
+ if port_err:
+ if log_enabled(ERROR):
+ log(ERROR, port_err)
+ raise LDAPInvalidPortError(port_err)
+ # using an empty string to bind a socket means "use the default as if this wasn't provided" because socket
+ # binding requires that you pass something for the ip if you want to pass a specific port
+ self.source_address = source_address if source_address is not None else ''
+ # using 0 as the source port to bind a socket means "use the default behavior of picking a random port from
+ # all ports as if this wasn't provided" because socket binding requires that you pass something for the port
+ # if you want to pass a specific ip
+ self.source_port_list = [0]
+ if source_port is not None:
+ self.source_port_list = [source_port]
+ elif source_port_list is not None:
+ self.source_port_list = source_port_list[:]
+
+ if isinstance(server, STRING_TYPES):
+ server = Server(server)
+ if isinstance(server, SEQUENCE_TYPES):
+ server = ServerPool(server, ROUND_ROBIN, active=True, exhaust=True)
+
+ if isinstance(server, ServerPool):
+ self.server_pool = server
+ self.server_pool.initialize(self)
+ self.server = self.server_pool.get_current_server(self)
+ else:
+ self.server_pool = None
+ self.server = server
+
+ # if self.authentication == SIMPLE and self.user and self.check_names:
+ # self.user = safe_dn(self.user)
+ # if log_enabled(EXTENDED):
+ # log(EXTENDED, 'user name sanitized to <%s> for simple authentication via <%s>', self.user, self)
+
+ if self.strategy_type == SYNC:
+ self.strategy = SyncStrategy(self)
+ elif self.strategy_type == ASYNC:
+ self.strategy = AsyncStrategy(self)
+ elif self.strategy_type == LDIF:
+ self.strategy = LdifProducerStrategy(self)
+ elif self.strategy_type == RESTARTABLE:
+ self.strategy = RestartableStrategy(self)
+ elif self.strategy_type == REUSABLE:
+ self.strategy = ReusableStrategy(self)
+ self.lazy = False
+ elif self.strategy_type == MOCK_SYNC:
+ self.strategy = MockSyncStrategy(self)
+ elif self.strategy_type == MOCK_ASYNC:
+ self.strategy = MockAsyncStrategy(self)
+ elif self.strategy_type == ASYNC_STREAM:
+ self.strategy = AsyncStreamStrategy(self)
+ else:
+ self.last_error = 'unknown strategy'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPUnknownStrategyError(self.last_error)
+
+ # maps strategy functions to connection functions
+ self.send = self.strategy.send
+ self.open = self.strategy.open
+ self.get_response = self.strategy.get_response
+ self.post_send_single_response = self.strategy.post_send_single_response
+ self.post_send_search = self.strategy.post_send_search
+
+ if not self.strategy.no_real_dsa:
+ self.do_auto_bind()
+ # else: # for strategies with a fake server set get_info to NONE if server hasn't a schema
+ # if self.server and not self.server.schema:
+ # self.server.get_info = NONE
+ if log_enabled(BASIC):
+ if get_library_log_hide_sensitive_data():
+ log(BASIC, 'instantiated Connection: <%s>', self.repr_with_sensitive_data_stripped())
+ else:
+ log(BASIC, 'instantiated Connection: <%r>', self)
+
+ def do_auto_bind(self):
+ if self.auto_bind and self.auto_bind not in [AUTO_BIND_NONE, AUTO_BIND_DEFAULT]:
+ if log_enabled(BASIC):
+ log(BASIC, 'performing automatic bind for <%s>', self)
+ if self.closed:
+ self.open(read_server_info=False)
+ if self.auto_bind == AUTO_BIND_NO_TLS:
+ self.bind(read_server_info=True)
+ elif self.auto_bind == AUTO_BIND_TLS_BEFORE_BIND:
+ self.start_tls(read_server_info=False)
+ self.bind(read_server_info=True)
+ elif self.auto_bind == AUTO_BIND_TLS_AFTER_BIND:
+ self.bind(read_server_info=False)
+ self.start_tls(read_server_info=True)
+ if not self.bound:
+ self.last_error = 'automatic bind not successful' + (' - ' + self.last_error if self.last_error else '')
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ self.unbind()
+ raise LDAPBindError(self.last_error)
+
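+ # Illustrative sketch of the automatic bind modes handled above (the server
+ # name, credentials and certificate path are hypothetical, not part of the library):
+ #
+ # import ssl
+ # from ldap3 import Server, Connection, Tls, AUTO_BIND_TLS_BEFORE_BIND
+ # tls = Tls(validate=ssl.CERT_REQUIRED, ca_certs_file='/path/to/ca.pem')
+ # server = Server('ldap.example.com', use_ssl=False, tls=tls)
+ # conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret',
+ #                    auto_bind=AUTO_BIND_TLS_BEFORE_BIND)  # start_tls() is issued, then bind()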
+ def __str__(self):
+ s = [
+ str(self.server) if self.server else 'None',
+ 'user: ' + str(self.user),
+ 'lazy' if self.lazy else 'not lazy',
+ 'unbound' if not self.bound else ('deferred bind' if self._deferred_bind else 'bound'),
+ 'closed' if self.closed else ('deferred open' if self._deferred_open else 'open'),
+ _format_socket_endpoints(self.socket),
+ 'tls not started' if not self.tls_started else('deferred start_tls' if self._deferred_start_tls else 'tls started'),
+ 'listening' if self.listening else 'not listening',
+ self.strategy.__class__.__name__ if hasattr(self, 'strategy') else 'No strategy',
+ 'internal decoder' if self.fast_decoder else 'pyasn1 decoder'
+ ]
+ return ' - '.join(s)
+
+ def __repr__(self):
+ conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
+ if self.server_pool:
+ r = 'Connection(server={0.server_pool!r}'.format(self)
+ else:
+ r = 'Connection(server={0.server!r}'.format(self)
+ r += '' if self.user is None else ', user={0.user!r}'.format(self)
+ r += '' if self.password is None else ', password={0.password!r}'.format(self)
+ r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self)
+ r += '' if self.version is None else ', version={0.version!r}'.format(self)
+ r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self)
+ r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self)
+ r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self)
+ r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self)
+ r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self)
+ r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self)
+ r += '' if self.usage is None else (', collect_usage=' + ('True' if self.usage else 'False'))
+ r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self)
+ r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self)
+ r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self)
+ r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self)
+ r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self)
+ r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self)
+ r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self)
+ r += '' if self.cred_store is None else (', cred_store=' + repr(self.cred_store))
+ r += '' if self.fast_decoder is None else (', fast_decoder=' + ('True' if self.fast_decoder else 'False'))
+ r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False'))
+ r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self)
+ r += '' if self.empty_attributes is None else (', return_empty_attributes=' + ('True' if self.empty_attributes else 'False'))
+ r += '' if self.auto_encode is None else (', auto_encode=' + ('True' if self.auto_encode else 'False'))
+ r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
+ r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
+ r += ')'
+
+ return r
+
+ def repr_with_sensitive_data_stripped(self):
+ conf_default_pool_name = get_config_parameter('DEFAULT_THREADED_POOL_NAME')
+ if self.server_pool:
+ r = 'Connection(server={0.server_pool!r}'.format(self)
+ else:
+ r = 'Connection(server={0.server!r}'.format(self)
+ r += '' if self.user is None else ', user={0.user!r}'.format(self)
+ r += '' if self.password is None else ", password='{0}'".format('<stripped %d characters of sensitive data>' % len(self.password))
+ r += '' if self.auto_bind is None else ', auto_bind={0.auto_bind!r}'.format(self)
+ r += '' if self.version is None else ', version={0.version!r}'.format(self)
+ r += '' if self.authentication is None else ', authentication={0.authentication!r}'.format(self)
+ r += '' if self.strategy_type is None else ', client_strategy={0.strategy_type!r}'.format(self)
+ r += '' if self.auto_referrals is None else ', auto_referrals={0.auto_referrals!r}'.format(self)
+ r += '' if self.sasl_mechanism is None else ', sasl_mechanism={0.sasl_mechanism!r}'.format(self)
+ if self.sasl_mechanism == DIGEST_MD5:
+ r += '' if self.sasl_credentials is None else ", sasl_credentials=({0!r}, {1!r}, '{2}', {3!r})".format(self.sasl_credentials[0], self.sasl_credentials[1], '*' * len(self.sasl_credentials[2]), self.sasl_credentials[3])
+ else:
+ r += '' if self.sasl_credentials is None else ', sasl_credentials={0.sasl_credentials!r}'.format(self)
+ r += '' if self.check_names is None else ', check_names={0.check_names!r}'.format(self)
+ r += '' if self.usage is None else (', collect_usage=' + ('True' if self.usage else 'False'))
+ r += '' if self.read_only is None else ', read_only={0.read_only!r}'.format(self)
+ r += '' if self.lazy is None else ', lazy={0.lazy!r}'.format(self)
+ r += '' if self.raise_exceptions is None else ', raise_exceptions={0.raise_exceptions!r}'.format(self)
+ r += '' if (self.pool_name is None or self.pool_name == conf_default_pool_name) else ', pool_name={0.pool_name!r}'.format(self)
+ r += '' if self.pool_size is None else ', pool_size={0.pool_size!r}'.format(self)
+ r += '' if self.pool_lifetime is None else ', pool_lifetime={0.pool_lifetime!r}'.format(self)
+ r += '' if self.pool_keepalive is None else ', pool_keepalive={0.pool_keepalive!r}'.format(self)
+ r += '' if self.cred_store is None else (', cred_store=' + repr(self.cred_store))
+ r += '' if self.fast_decoder is None else (', fast_decoder=' + ('True' if self.fast_decoder else 'False'))
+ r += '' if self.auto_range is None else (', auto_range=' + ('True' if self.auto_range else 'False'))
+ r += '' if self.receive_timeout is None else ', receive_timeout={0.receive_timeout!r}'.format(self)
+ r += '' if self.empty_attributes is None else (', return_empty_attributes=' + ('True' if self.empty_attributes else 'False'))
+ r += '' if self.auto_encode is None else (', auto_encode=' + ('True' if self.auto_encode else 'False'))
+ r += '' if self.auto_escape is None else (', auto_escape=' + ('True' if self.auto_escape else 'False'))
+ r += '' if self.use_referral_cache is None else (', use_referral_cache=' + ('True' if self.use_referral_cache else 'False'))
+ r += ')'
+
+ return r
+
+ @property
+ def stream(self):
+ """Used by the LDIFProducer strategy to accumulate the ldif-change operations with a single LDIF header
+ :return: reference to the response stream if defined in the strategy.
+ """
+ return self.strategy.get_stream() if self.strategy.can_stream else None
+
+ @stream.setter
+ def stream(self, value):
+ with self.connection_lock:
+ if self.strategy.can_stream:
+ self.strategy.set_stream(value)
+
+ @property
+ def usage(self):
+ """Usage statistics for the connection.
+ :return: Usage object
+ """
+ if not self._usage:
+ return None
+ if self.strategy.pooled: # update master connection usage from pooled connections
+ self._usage.reset()
+ for worker in self.strategy.pool.workers:
+ self._usage += worker.connection.usage
+ self._usage += self.strategy.pool.terminated_usage
+ return self._usage
+
+ def __enter__(self):
+ with self.connection_lock:
+ self._context_state.append((self.bound, self.closed)) # save status out of context as a tuple in a list
+ if self.auto_bind != AUTO_BIND_NONE:
+ if self.auto_bind == AUTO_BIND_DEFAULT:
+ self.auto_bind = AUTO_BIND_NO_TLS
+ if self.closed:
+ self.open()
+ if not self.bound:
+ self.bind()
+
+ return self
+
+ # noinspection PyUnusedLocal
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ with self.connection_lock:
+ context_bound, context_closed = self._context_state.pop()
+ if (not context_bound and self.bound) or self.stream: # restore status prior to entering context
+ try:
+ self.unbind()
+ except LDAPExceptionError:
+ pass
+
+ if not context_closed and self.closed:
+ self.open()
+
+ if exc_type is not None:
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', exc_type, self)
+ return False # re-raise LDAPExceptionError
+
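+ # Illustrative sketch of the context manager protocol implemented above: the
+ # connection is opened and bound on entry and restored to its previous state
+ # on exit (server, DN and password are hypothetical):
+ #
+ # with Connection(server, user='cn=admin,dc=example,dc=com', password='secret') as conn:
+ #     conn.search('dc=example,dc=com', '(objectclass=person)')
+ #     print(conn.entries)
+ # # outside the block the connection is unbound again if it was not bound before entering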
+ def bind(self,
+ read_server_info=True,
+ controls=None):
+ """Bind to ldap Server with the authentication method and the user defined in the connection
+
+ :param read_server_info: reads info from server
+ :param controls: LDAP controls to send along with the bind operation
+ :type controls: list of tuple
+ :return: bool
+
+ """
+ if log_enabled(BASIC):
+ log(BASIC, 'start BIND operation via <%s>', self)
+ self.last_error = None
+ with self.connection_lock:
+ if self.lazy and not self._executing_deferred:
+ if self.strategy.pooled:
+ self.strategy.validate_bind(controls)
+ self._deferred_bind = True
+ self._bind_controls = controls
+ self.bound = True
+ if log_enabled(BASIC):
+ log(BASIC, 'deferring bind for <%s>', self)
+ else:
+ self._deferred_bind = False
+ self._bind_controls = None
+ if self.closed: # try to open connection if closed
+ self.open(read_server_info=False)
+ if self.authentication == ANONYMOUS:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing anonymous BIND for <%s>', self)
+ if not self.strategy.pooled:
+ request = bind_operation(self.version, self.authentication, self.user, '', auto_encode=self.auto_encode)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'anonymous BIND request <%s> sent via <%s>', bind_request_to_dict(request), self)
+ response = self.post_send_single_response(self.send('bindRequest', request, controls))
+ else:
+ response = self.strategy.validate_bind(controls) # only for REUSABLE
+ elif self.authentication == SIMPLE:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing simple BIND for <%s>', self)
+ if not self.strategy.pooled:
+ request = bind_operation(self.version, self.authentication, self.user, self.password, auto_encode=self.auto_encode)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'simple BIND request <%s> sent via <%s>', bind_request_to_dict(request), self)
+ response = self.post_send_single_response(self.send('bindRequest', request, controls))
+ else:
+ response = self.strategy.validate_bind(controls) # only for REUSABLE
+ elif self.authentication == SASL:
+ if self.sasl_mechanism in SASL_AVAILABLE_MECHANISMS:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing SASL BIND for <%s>', self)
+ if not self.strategy.pooled:
+ response = self.do_sasl_bind(controls)
+ else:
+ response = self.strategy.validate_bind(controls) # only for REUSABLE
+ else:
+ self.last_error = 'requested SASL mechanism not supported'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPSASLMechanismNotSupportedError(self.last_error)
+ elif self.authentication == NTLM:
+ if self.user and self.password and len(self.user.split('\\')) == 2:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing NTLM BIND for <%s>', self)
+ if not self.strategy.pooled:
+ response = self.do_ntlm_bind(controls)
+ else:
+ response = self.strategy.validate_bind(controls) # only for REUSABLE
+ else: # user or password missing
+ self.last_error = 'NTLM needs domain\\username and a password'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPUnknownAuthenticationMethodError(self.last_error)
+ else:
+ self.last_error = 'unknown authentication method'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPUnknownAuthenticationMethodError(self.last_error)
+
+ if not self.strategy.sync and not self.strategy.pooled and self.authentication not in (SASL, NTLM): # get response if asynchronous except for SASL and NTLM that return the bind result even for asynchronous strategy
+ _, result = self.get_response(response)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'async BIND response id <%s> received via <%s>', result, self)
+ elif self.strategy.sync:
+ result = self.result
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'BIND response <%s> received via <%s>', result, self)
+ elif self.strategy.pooled or self.authentication in (SASL, NTLM): # asynchronous SASL and NTLM or reusable strategy get the bind result synchronously
+ result = response
+ else:
+ self.last_error = 'unknown authentication method'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPUnknownAuthenticationMethodError(self.last_error)
+
+ if result is None:
+ # self.bound = True if self.strategy_type == REUSABLE else False
+ self.bound = False
+ elif result is True:
+ self.bound = True
+ elif result is False:
+ self.bound = False
+ else:
+ self.bound = True if result['result'] == RESULT_SUCCESS else False
+ if not self.bound and result and result['description'] and not self.last_error:
+ self.last_error = result['description']
+
+ if read_server_info and self.bound:
+ self.refresh_server_info()
+ self._entries = []
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done BIND operation, result <%s>', self.bound)
+
+ return self.bound
+
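+ # Illustrative sketch of an explicit bind with result checking (the Connection
+ # is assumed to have been created without auto_bind and with lazy=False):
+ #
+ # if not conn.bind():
+ #     print('bind failed:', conn.result['description'], conn.last_error)
+ # else:
+ #     print('bound:', conn.bound)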
+ def rebind(self,
+ user=None,
+ password=None,
+ authentication=None,
+ sasl_mechanism=None,
+ sasl_credentials=None,
+ read_server_info=True,
+ controls=None
+ ):
+
+ if log_enabled(BASIC):
+ log(BASIC, 'start (RE)BIND operation via <%s>', self)
+ self.last_error = None
+ with self.connection_lock:
+ if user:
+ self.user = user
+ if password is not None:
+ self.password = password
+ if not authentication and user:
+ self.authentication = SIMPLE
+ if authentication in [SIMPLE, ANONYMOUS, SASL, NTLM]:
+ self.authentication = authentication
+ elif authentication is not None:
+ self.last_error = 'unknown authentication method'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPUnknownAuthenticationMethodError(self.last_error)
+ if sasl_mechanism:
+ self.sasl_mechanism = sasl_mechanism
+ if sasl_credentials:
+ self.sasl_credentials = sasl_credentials
+
+ # if self.authentication == SIMPLE and self.user and self.check_names:
+ # self.user = safe_dn(self.user)
+ # if log_enabled(EXTENDED):
+ # log(EXTENDED, 'user name sanitized to <%s> for rebind via <%s>', self.user, self)
+
+ if not self.strategy.pooled:
+ try:
+ return self.bind(read_server_info, controls)
+ except LDAPSocketReceiveError:
+ raise LDAPBindError('Unable to rebind as a different user, furthermore the server abruptly closed the connection')
+ else:
+ self.strategy.pool.rebind_pool()
+ return True
+
+ def unbind(self,
+ controls=None):
+ """Unbind the connected user. Unbind implies closing session as per RFC4511 (4.3)
+
+ :param controls: LDAP controls to send along with the bind operation
+
+ """
+ if log_enabled(BASIC):
+ log(BASIC, 'start UNBIND operation via <%s>', self)
+
+ if self.use_referral_cache:
+ self.strategy.unbind_referral_cache()
+
+ self.last_error = None
+ with self.connection_lock:
+ if self.lazy and not self._executing_deferred and (self._deferred_bind or self._deferred_open): # _clear deferred status
+ self.strategy.close()
+ self._deferred_open = False
+ self._deferred_bind = False
+ self._deferred_start_tls = False
+ elif not self.closed:
+ request = unbind_operation()
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'UNBIND request sent via <%s>', self)
+ self.send('unbindRequest', request, controls)
+ self.strategy.close()
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done UNBIND operation, result <%s>', True)
+
+ return True
+
+ def search(self,
+ search_base,
+ search_filter,
+ search_scope=SUBTREE,
+ dereference_aliases=DEREF_ALWAYS,
+ attributes=None,
+ size_limit=0,
+ time_limit=0,
+ types_only=False,
+ get_operational_attributes=False,
+ controls=None,
+ paged_size=None,
+ paged_criticality=False,
+ paged_cookie=None,
+ auto_escape=None):
+ """
+ Perform an ldap search:
+
+ - If attributes is empty no attribute is returned
+ - If attributes is ALL_ATTRIBUTES all attributes are returned
+ - If paged_size is an int greater than 0 a simple paged search
+ is tried as described in RFC 2696 with the specified size
+ - If paged_size is 0 and cookie is present the search is abandoned on
+ the server
+ - Cookie is an opaque string received in the last paged search
+ and must be used on the next paged search request
+ - If lazy == True open and bind will be deferred until another
+ LDAP operation is performed
+ - If missing_attributes == True then an attribute not returned by the server is set to None
+ - If auto_escape is set it overrides the Connection auto_escape
+ """
+ conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
+ if log_enabled(BASIC):
+ log(BASIC, 'start SEARCH operation via <%s>', self)
+
+ if self.check_names and search_base:
+ search_base = safe_dn(search_base)
+ if log_enabled(EXTENDED):
+ log(EXTENDED, 'search base sanitized to <%s> for SEARCH operation via <%s>', search_base, self)
+
+ with self.connection_lock:
+ self._fire_deferred()
+ if not attributes:
+ attributes = [NO_ATTRIBUTES]
+ elif attributes == ALL_ATTRIBUTES:
+ attributes = [ALL_ATTRIBUTES]
+
+ if isinstance(attributes, STRING_TYPES):
+ attributes = [attributes]
+
+ if get_operational_attributes and isinstance(attributes, list):
+ attributes.append(ALL_OPERATIONAL_ATTRIBUTES)
+ elif get_operational_attributes and isinstance(attributes, tuple):
+ attributes += (ALL_OPERATIONAL_ATTRIBUTES, ) # concatenate tuple
+
+ if isinstance(paged_size, int):
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing paged search for %d items with cookie <%s> for <%s>', paged_size, escape_bytes(paged_cookie), self)
+
+ if controls is None:
+ controls = []
+ else:
+ # Copy the controls to prevent modifying the original object
+ controls = list(controls)
+ controls.append(paged_search_control(paged_criticality, paged_size, paged_cookie))
+
+ if self.server and self.server.schema and self.check_names:
+ for attribute_name in attributes:
+ if ';' in attribute_name: # remove tags
+ attribute_name_to_check = attribute_name.split(';')[0]
+ else:
+ attribute_name_to_check = attribute_name
+ if self.server.schema and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
+ raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
+
+ request = search_operation(search_base,
+ search_filter,
+ search_scope,
+ dereference_aliases,
+ attributes,
+ size_limit,
+ time_limit,
+ types_only,
+ self.auto_escape if auto_escape is None else auto_escape,
+ self.auto_encode,
+ self.server.schema if self.server else None,
+ validator=self.server.custom_validator,
+ check_names=self.check_names)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'SEARCH request <%s> sent via <%s>', search_request_to_dict(request), self)
+ response = self.post_send_search(self.send('searchRequest', request, controls))
+ self._entries = []
+
+ if isinstance(response, int): # asynchronous strategy
+ return_value = response
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'async SEARCH response id <%s> received via <%s>', return_value, self)
+ else:
+ return_value = True if self.result['type'] == 'searchResDone' and len(response) > 0 else False
+ if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
+ self.last_error = self.result['description']
+
+ if log_enabled(PROTOCOL):
+ for entry in response:
+ if entry['type'] == 'searchResEntry':
+ log(PROTOCOL, 'SEARCH response entry <%s> received via <%s>', entry, self)
+ elif entry['type'] == 'searchResRef':
+ log(PROTOCOL, 'SEARCH response reference <%s> received via <%s>', entry, self)
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done SEARCH operation, result <%s>', return_value)
+
+ return return_value
+
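+ # Illustrative sketch of a simple paged search using the paged_size and
+ # paged_cookie parameters documented above (base DN and filter are hypothetical;
+ # the control lookup follows the usual ldap3 pattern for the RFC 2696 control):
+ #
+ # from ldap3 import SUBTREE, ALL_ATTRIBUTES
+ # cookie = None
+ # while True:
+ #     conn.search('dc=example,dc=com', '(objectclass=person)', search_scope=SUBTREE,
+ #                 attributes=ALL_ATTRIBUTES, paged_size=100, paged_cookie=cookie)
+ #     for entry in conn.entries:
+ #         print(entry.entry_dn)
+ #     cookie = conn.result['controls']['1.2.840.113556.1.4.319']['value']['cookie']
+ #     if not cookie:  # an empty cookie means the last page has been returned
+ #         break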
+ def compare(self,
+ dn,
+ attribute,
+ value,
+ controls=None):
+ """
+ Perform a compare operation
+ """
+ conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
+
+ if log_enabled(BASIC):
+ log(BASIC, 'start COMPARE operation via <%s>', self)
+ self.last_error = None
+ if self.check_names:
+ dn = safe_dn(dn)
+ if log_enabled(EXTENDED):
+ log(EXTENDED, 'dn sanitized to <%s> for COMPARE operation via <%s>', dn, self)
+
+ if self.server and self.server.schema and self.check_names:
+ if ';' in attribute: # remove tags for checking
+ attribute_name_to_check = attribute.split(';')[0]
+ else:
+ attribute_name_to_check = attribute
+
+ if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
+ raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
+
+ if isinstance(value, SEQUENCE_TYPES): # value can't be a sequence
+ raise LDAPInvalidValueError('value cannot be a sequence')
+
+ with self.connection_lock:
+ self._fire_deferred()
+ request = compare_operation(dn, attribute, value, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'COMPARE request <%s> sent via <%s>', compare_request_to_dict(request), self)
+ response = self.post_send_single_response(self.send('compareRequest', request, controls))
+ self._entries = []
+ if isinstance(response, int):
+ return_value = response
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'async COMPARE response id <%s> received via <%s>', return_value, self)
+ else:
+ return_value = True if self.result['type'] == 'compareResponse' and self.result['result'] == RESULT_COMPARE_TRUE else False
+ if not return_value and self.result['result'] not in [RESULT_COMPARE_TRUE, RESULT_COMPARE_FALSE] and not self.last_error:
+ self.last_error = self.result['description']
+
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'COMPARE response <%s> received via <%s>', response, self)
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done COMPARE operation, result <%s>', return_value)
+
+ return return_value
+
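+ # Illustrative sketch of a compare operation (DN and value are hypothetical);
+ # the return value is True only when the server answers compareTrue:
+ #
+ # if conn.compare('cn=user1,ou=people,dc=example,dc=com', 'mail', 'user1@example.com'):
+ #     print('attribute value matches')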
+ def add(self,
+ dn,
+ object_class=None,
+ attributes=None,
+ controls=None):
+ """
+ Add dn to the DIT; object_class can be None, a class name or a list
+ of class names.
+
+ attributes is a dictionary in the form {'attr': 'val'} or {'attr':
+ ['val1', 'val2', ...]} for multivalued attributes
+ """
+ conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
+ conf_classes_excluded_from_check = [v.lower() for v in get_config_parameter('CLASSES_EXCLUDED_FROM_CHECK')]
+ if log_enabled(BASIC):
+ log(BASIC, 'start ADD operation via <%s>', self)
+ self.last_error = None
+ _attributes = deepcopy(attributes) # dict could change when adding objectClass values
+ if self.check_names:
+ dn = safe_dn(dn)
+ if log_enabled(EXTENDED):
+ log(EXTENDED, 'dn sanitized to <%s> for ADD operation via <%s>', dn, self)
+
+ with self.connection_lock:
+ self._fire_deferred()
+ attr_object_class = []
+ if object_class is None:
+ parm_object_class = []
+ else:
+ parm_object_class = list(object_class) if isinstance(object_class, SEQUENCE_TYPES) else [object_class]
+
+ object_class_attr_name = ''
+ if _attributes:
+ for attr in _attributes:
+ if attr.lower() == 'objectclass':
+ object_class_attr_name = attr
+ attr_object_class = list(_attributes[object_class_attr_name]) if isinstance(_attributes[object_class_attr_name], SEQUENCE_TYPES) else [_attributes[object_class_attr_name]]
+ break
+ else:
+ _attributes = dict()
+
+ if not object_class_attr_name:
+ object_class_attr_name = 'objectClass'
+
+ attr_object_class = [to_unicode(object_class) for object_class in attr_object_class] # converts objectclass to unicode in case of bytes value
+ _attributes[object_class_attr_name] = reduce(lambda x, y: x + [y] if y not in x else x, parm_object_class + attr_object_class, []) # remove duplicate ObjectClasses
+
+ if not _attributes[object_class_attr_name]:
+ self.last_error = 'objectClass attribute is mandatory'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPObjectClassError(self.last_error)
+
+ if self.server and self.server.schema and self.check_names:
+ for object_class_name in _attributes[object_class_attr_name]:
+ if object_class_name.lower() not in conf_classes_excluded_from_check and object_class_name not in self.server.schema.object_classes:
+ raise LDAPObjectClassError('invalid object class ' + str(object_class_name))
+
+ for attribute_name in _attributes:
+ if ';' in attribute_name: # remove tags for checking
+ attribute_name_to_check = attribute_name.split(';')[0]
+ else:
+ attribute_name_to_check = attribute_name
+
+ if attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
+ raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
+
+ request = add_operation(dn, _attributes, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'ADD request <%s> sent via <%s>', add_request_to_dict(request), self)
+ response = self.post_send_single_response(self.send('addRequest', request, controls))
+ self._entries = []
+
+ if isinstance(response, STRING_TYPES + (int, )):
+ return_value = response
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'async ADD response id <%s> received via <%s>', return_value, self)
+ else:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'ADD response <%s> received via <%s>', response, self)
+ return_value = True if self.result['type'] == 'addResponse' and self.result['result'] == RESULT_SUCCESS else False
+ if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
+ self.last_error = self.result['description']
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done ADD operation, result <%s>', return_value)
+
+ return return_value
+
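+ # Illustrative sketch of an add operation following the object_class/attributes
+ # convention documented above (DN, classes and values are hypothetical):
+ #
+ # conn.add('cn=user1,ou=people,dc=example,dc=com',
+ #          object_class=['inetOrgPerson', 'organizationalPerson', 'person'],
+ #          attributes={'sn': 'User', 'givenName': 'First', 'mail': ['user1@example.com']})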
+ def delete(self,
+ dn,
+ controls=None):
+ """
+ Delete the entry identified by the DN from the DIB.
+ """
+ if log_enabled(BASIC):
+ log(BASIC, 'start DELETE operation via <%s>', self)
+ self.last_error = None
+ if self.check_names:
+ dn = safe_dn(dn)
+ if log_enabled(EXTENDED):
+ log(EXTENDED, 'dn sanitized to <%s> for DELETE operation via <%s>', dn, self)
+
+ with self.connection_lock:
+ self._fire_deferred()
+ if self.read_only:
+ self.last_error = 'connection is read-only'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPConnectionIsReadOnlyError(self.last_error)
+
+ request = delete_operation(dn)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'DELETE request <%s> sent via <%s>', delete_request_to_dict(request), self)
+ response = self.post_send_single_response(self.send('delRequest', request, controls))
+ self._entries = []
+
+ if isinstance(response, STRING_TYPES + (int, )):
+ return_value = response
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'async DELETE response id <%s> received via <%s>', return_value, self)
+ else:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'DELETE response <%s> received via <%s>', response, self)
+ return_value = True if self.result['type'] == 'delResponse' and self.result['result'] == RESULT_SUCCESS else False
+ if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
+ self.last_error = self.result['description']
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done DELETE operation, result <%s>', return_value)
+
+ return return_value
+
+ def modify(self,
+ dn,
+ changes,
+ controls=None):
+ """
+ Modify attributes of entry
+
+ - changes is a dictionary in the form {'attribute1': change, 'attribute2': [change, change, ...], ...}
+ - change is (operation, [value1, value2, ...])
+ - operation is 0 (MODIFY_ADD), 1 (MODIFY_DELETE), 2 (MODIFY_REPLACE), 3 (MODIFY_INCREMENT)
+ """
+ conf_attributes_excluded_from_check = [v.lower() for v in get_config_parameter('ATTRIBUTES_EXCLUDED_FROM_CHECK')]
+
+ if log_enabled(BASIC):
+ log(BASIC, 'start MODIFY operation via <%s>', self)
+ self.last_error = None
+ if self.check_names:
+ dn = safe_dn(dn)
+ if log_enabled(EXTENDED):
+ log(EXTENDED, 'dn sanitized to <%s> for MODIFY operation via <%s>', dn, self)
+
+ with self.connection_lock:
+ self._fire_deferred()
+ if self.read_only:
+ self.last_error = 'connection is read-only'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPConnectionIsReadOnlyError(self.last_error)
+
+ if not isinstance(changes, dict):
+ self.last_error = 'changes must be a dictionary'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPChangeError(self.last_error)
+
+ if not changes:
+ self.last_error = 'no changes in modify request'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPChangeError(self.last_error)
+
+ changelist = dict()
+ for attribute_name in changes:
+ if self.server and self.server.schema and self.check_names:
+ if ';' in attribute_name: # remove tags for checking
+ attribute_name_to_check = attribute_name.split(';')[0]
+ else:
+ attribute_name_to_check = attribute_name
+
+ if self.server.schema.attribute_types and attribute_name_to_check.lower() not in conf_attributes_excluded_from_check and attribute_name_to_check not in self.server.schema.attribute_types:
+ raise LDAPAttributeError('invalid attribute type ' + attribute_name_to_check)
+ change = changes[attribute_name]
+ if isinstance(change, SEQUENCE_TYPES) and change[0] in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]:
+ if len(change) != 2:
+ self.last_error = 'malformed change'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPChangeError(self.last_error)
+
+ changelist[attribute_name] = [change] # insert change in a list
+ else:
+ for change_operation in change:
+ if len(change_operation) != 2 or change_operation[0] not in [MODIFY_ADD, MODIFY_DELETE, MODIFY_REPLACE, MODIFY_INCREMENT, 0, 1, 2, 3]:
+ self.last_error = 'invalid change list'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPChangeError(self.last_error)
+ changelist[attribute_name] = change
+ request = modify_operation(dn, changelist, self.auto_encode, self.server.schema if self.server else None, validator=self.server.custom_validator if self.server else None, check_names=self.check_names)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'MODIFY request <%s> sent via <%s>', modify_request_to_dict(request), self)
+ response = self.post_send_single_response(self.send('modifyRequest', request, controls))
+ self._entries = []
+
+ if isinstance(response, STRING_TYPES + (int, )):
+ return_value = response
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'async MODIFY response id <%s> received via <%s>', return_value, self)
+ else:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'MODIFY response <%s> received via <%s>', response, self)
+ return_value = True if self.result['type'] == 'modifyResponse' and self.result['result'] == RESULT_SUCCESS else False
+ if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
+ self.last_error = self.result['description']
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done MODIFY operation, result <%s>', return_value)
+
+ return return_value
+
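+ # Illustrative sketch of a modify operation using the changes dictionary format
+ # documented above (DN and values are hypothetical):
+ #
+ # from ldap3 import MODIFY_REPLACE, MODIFY_ADD
+ # conn.modify('cn=user1,ou=people,dc=example,dc=com',
+ #             {'mail': [(MODIFY_REPLACE, ['new@example.com'])],
+ #              'description': [(MODIFY_ADD, ['first description', 'second description'])]})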
+ def modify_dn(self,
+ dn,
+ relative_dn,
+ delete_old_dn=True,
+ new_superior=None,
+ controls=None):
+ """
+ Modify the DN of the entry or perform a move of the entry in the
+ DIT.
+ """
+ if log_enabled(BASIC):
+ log(BASIC, 'start MODIFY DN operation via <%s>', self)
+ self.last_error = None
+ if self.check_names:
+ dn = safe_dn(dn)
+ if log_enabled(EXTENDED):
+ log(EXTENDED, 'dn sanitized to <%s> for MODIFY DN operation via <%s>', dn, self)
+ relative_dn = safe_dn(relative_dn)
+ if log_enabled(EXTENDED):
+ log(EXTENDED, 'relative dn sanitized to <%s> for MODIFY DN operation via <%s>', relative_dn, self)
+
+ with self.connection_lock:
+ self._fire_deferred()
+ if self.read_only:
+ self.last_error = 'connection is read-only'
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', self.last_error, self)
+ raise LDAPConnectionIsReadOnlyError(self.last_error)
+
+ # if new_superior and not dn.startswith(relative_dn): # as per RFC4511 (4.9)
+ # self.last_error = 'DN cannot change while performing moving'
+ # if log_enabled(ERROR):
+ # log(ERROR, '%s for <%s>', self.last_error, self)
+ # raise LDAPChangeError(self.last_error)
+
+ request = modify_dn_operation(dn, relative_dn, delete_old_dn, new_superior)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'MODIFY DN request <%s> sent via <%s>', modify_dn_request_to_dict(request), self)
+ response = self.post_send_single_response(self.send('modDNRequest', request, controls))
+ self._entries = []
+
+ if isinstance(response, STRING_TYPES + (int, )):
+ return_value = response
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'async MODIFY DN response id <%s> received via <%s>', return_value, self)
+ else:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'MODIFY DN response <%s> received via <%s>', response, self)
+ return_value = True if self.result['type'] == 'modDNResponse' and self.result['result'] == RESULT_SUCCESS else False
+ if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
+ self.last_error = self.result['description']
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done MODIFY DN operation, result <%s>', return_value)
+
+ return return_value
+
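+ # Illustrative sketch of a rename and of a move to a new superior (DNs are
+ # hypothetical):
+ #
+ # conn.modify_dn('cn=user1,ou=people,dc=example,dc=com', 'cn=user2')  # rename
+ # conn.modify_dn('cn=user2,ou=people,dc=example,dc=com', 'cn=user2',
+ #                new_superior='ou=staff,dc=example,dc=com')           # move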
+ def abandon(self,
+ message_id,
+ controls=None):
+ """
+ Abandon the operation indicated by message_id
+ """
+ if log_enabled(BASIC):
+ log(BASIC, 'start ABANDON operation via <%s>', self)
+ self.last_error = None
+ with self.connection_lock:
+ self._fire_deferred()
+ return_value = False
+ if self.strategy._outstanding or message_id == 0:
+ # only outstanding operations can be abandoned; abandon, bind and unbind can never be abandoned.
+ # message ID 0 is invalid for a real operation and can be used as a "ping" to keep the connection alive
+ if (self.strategy._outstanding and message_id in self.strategy._outstanding and self.strategy._outstanding[message_id]['type'] not in ['abandonRequest', 'bindRequest', 'unbindRequest']) or message_id == 0:
+ request = abandon_operation(message_id)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'ABANDON request: <%s> sent via <%s>', abandon_request_to_dict(request), self)
+ self.send('abandonRequest', request, controls)
+ self.result = None
+ self.response = None
+ self._entries = []
+ return_value = True
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'cannot abandon a Bind, an Unbind or an Abandon operation or message ID %s not found via <%s>', str(message_id), self)
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done ABANDON operation, result <%s>', return_value)
+
+ return return_value
+
+ def extended(self,
+ request_name,
+ request_value=None,
+ controls=None,
+ no_encode=None):
+ """
+ Performs an extended operation
+ """
+ if log_enabled(BASIC):
+ log(BASIC, 'start EXTENDED operation via <%s>', self)
+ self.last_error = None
+ with self.connection_lock:
+ self._fire_deferred()
+ request = extended_operation(request_name, request_value, no_encode=no_encode)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'EXTENDED request <%s> sent via <%s>', extended_request_to_dict(request), self)
+ response = self.post_send_single_response(self.send('extendedReq', request, controls))
+ self._entries = []
+ if isinstance(response, int):
+ return_value = response
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'async EXTENDED response id <%s> received via <%s>', return_value, self)
+ else:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'EXTENDED response <%s> received via <%s>', response, self)
+ return_value = True if self.result['type'] == 'extendedResp' and self.result['result'] == RESULT_SUCCESS else False
+ if not return_value and self.result['result'] not in [RESULT_SUCCESS] and not self.last_error:
+ self.last_error = self.result['description']
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done EXTENDED operation, result <%s>', return_value)
+
+ return return_value
+
+ def start_tls(self, read_server_info=True): # as per RFC4511. Removal of TLS is defined as MAY in RFC4511 so the client can't implement a generic stop_tls method
+
+ if log_enabled(BASIC):
+ log(BASIC, 'start START TLS operation via <%s>', self)
+
+ with self.connection_lock:
+ return_value = False
+ if not self.server.tls:
+ self.server.tls = Tls()
+
+ if self.lazy and not self._executing_deferred:
+ self._deferred_start_tls = True
+ self.tls_started = True
+ return_value = True
+ if log_enabled(BASIC):
+ log(BASIC, 'deferring START TLS for <%s>', self)
+ else:
+ self._deferred_start_tls = False
+ if self.closed:
+ self.open()
+ if self.server.tls.start_tls(self) and self.strategy.sync: # for asynchronous connections _start_tls is run by the strategy
+ if read_server_info:
+ self.refresh_server_info() # refresh server info as per RFC4515 (3.1.5)
+ return_value = True
+ elif not self.strategy.sync:
+ return_value = True
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done START TLS operation, result <%s>', return_value)
+
+ return return_value
+
+ def do_sasl_bind(self,
+ controls):
+ if log_enabled(BASIC):
+ log(BASIC, 'start SASL BIND operation via <%s>', self)
+ self.last_error = None
+ with self.connection_lock:
+ result = None
+
+ if not self.sasl_in_progress:
+ self.sasl_in_progress = True
+ try:
+ if self.sasl_mechanism == EXTERNAL:
+ result = sasl_external(self, controls)
+ elif self.sasl_mechanism == DIGEST_MD5:
+ result = sasl_digest_md5(self, controls)
+ elif self.sasl_mechanism == GSSAPI:
+ from ..protocol.sasl.kerberos import sasl_gssapi # needs the gssapi package
+ result = sasl_gssapi(self, controls)
+ elif self.sasl_mechanism == 'PLAIN':
+ result = sasl_plain(self, controls)
+ finally:
+ self.sasl_in_progress = False
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done SASL BIND operation, result <%s>', result)
+
+ return result
+
+ def do_ntlm_bind(self,
+ controls):
+ if log_enabled(BASIC):
+ log(BASIC, 'start NTLM BIND operation via <%s>', self)
+ self.last_error = None
+ with self.connection_lock:
+ result = None
+ if not self.sasl_in_progress:
+ self.sasl_in_progress = True # NTLM is treated the same way as SASL authentication
+ try:
+ # additional import for NTLM
+ from ..utils.ntlm import NtlmClient
+ domain_name, user_name = self.user.split('\\', 1)
+ ntlm_client = NtlmClient(user_name=user_name, domain=domain_name, password=self.password)
+
+ # as per https://msdn.microsoft.com/en-us/library/cc223501.aspx
+ # send a sicilyPackageDiscovery request (in the bindRequest)
+ request = bind_operation(self.version, 'SICILY_PACKAGE_DISCOVERY', ntlm_client)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'NTLM SICILY PACKAGE DISCOVERY request sent via <%s>', self)
+ response = self.post_send_single_response(self.send('bindRequest', request, controls))
+ if not self.strategy.sync:
+ _, result = self.get_response(response)
+ else:
+ result = response[0]
+ if 'server_creds' in result:
+ sicily_packages = result['server_creds'].decode('ascii').split(';')
+ if 'NTLM' in sicily_packages: # NTLM available on server
+ request = bind_operation(self.version, 'SICILY_NEGOTIATE_NTLM', ntlm_client)
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'NTLM SICILY NEGOTIATE request sent via <%s>', self)
+ response = self.post_send_single_response(self.send('bindRequest', request, controls))
+ if not self.strategy.sync:
+ _, result = self.get_response(response)
+ else:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'NTLM SICILY NEGOTIATE response <%s> received via <%s>', response[0],
+ self)
+ result = response[0]
+
+ if result['result'] == RESULT_SUCCESS:
+ request = bind_operation(self.version, 'SICILY_RESPONSE_NTLM', ntlm_client,
+ result['server_creds'])
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'NTLM SICILY RESPONSE NTLM request sent via <%s>', self)
+ response = self.post_send_single_response(self.send('bindRequest', request, controls))
+ if not self.strategy.sync:
+ _, result = self.get_response(response)
+ else:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'NTLM BIND response <%s> received via <%s>', response[0], self)
+ result = response[0]
+ else:
+ result = None
+ finally:
+ self.sasl_in_progress = False
+
+ if log_enabled(BASIC):
+ log(BASIC, 'done NTLM BIND operation, result <%s>', result)
+
+ return result
+
+ def refresh_server_info(self):
+ # if self.strategy.no_real_dsa: # do not refresh for mock strategies
+ # return
+
+ if not self.strategy.pooled:
+ with self.connection_lock:
+ if not self.closed:
+ if log_enabled(BASIC):
+ log(BASIC, 'refreshing server info for <%s>', self)
+ previous_response = self.response
+ previous_result = self.result
+ previous_entries = self._entries
+ self.server.get_info_from_server(self)
+ self.response = previous_response
+ self.result = previous_result
+ self._entries = previous_entries
+ else:
+ if log_enabled(BASIC):
+ log(BASIC, 'refreshing server info from pool for <%s>', self)
+ self.strategy.pool.get_info_from_server()
+
+ def response_to_ldif(self,
+ search_result=None,
+ all_base64=False,
+ line_separator=None,
+ sort_order=None,
+ stream=None):
+ with self.connection_lock:
+ if search_result is None:
+ search_result = self.response
+
+ if isinstance(search_result, SEQUENCE_TYPES):
+ ldif_lines = operation_to_ldif('searchResponse', search_result, all_base64, sort_order=sort_order)
+ ldif_lines = add_ldif_header(ldif_lines)
+ line_separator = line_separator or linesep
+ ldif_output = line_separator.join(ldif_lines)
+ if stream:
+ if stream.tell() == 0:
+ header = add_ldif_header(['-'])[0]
+ stream.write(prepare_for_stream(header + line_separator + line_separator))
+ stream.write(prepare_for_stream(ldif_output + line_separator + line_separator))
+ if log_enabled(BASIC):
+ log(BASIC, 'building LDIF output <%s> for <%s>', ldif_output, self)
+ return ldif_output
+
+ return None
+
+ def response_to_json(self,
+ raw=False,
+ search_result=None,
+ indent=4,
+ sort=True,
+ stream=None,
+ checked_attributes=True,
+ include_empty=True):
+
+ with self.connection_lock:
+ if search_result is None:
+ search_result = self.response
+
+ if isinstance(search_result, SEQUENCE_TYPES):
+ json_dict = dict()
+ json_dict['entries'] = []
+
+ for response in search_result:
+ if response['type'] == 'searchResEntry':
+ entry = dict()
+
+ entry['dn'] = response['dn']
+ if checked_attributes:
+ if not include_empty:
+ # needed for python 2.6 compatibility
+ entry['attributes'] = dict((key, response['attributes'][key]) for key in response['attributes'] if response['attributes'][key])
+ else:
+ entry['attributes'] = dict(response['attributes'])
+ if raw:
+ if not include_empty:
+ # needed for python 2.6 compatibility
+ entry['raw_attributes'] = dict((key, response['raw_attributes'][key]) for key in response['raw_attributes'] if response['raw_attributes'][key])
+ else:
+ entry['raw'] = dict(response['raw_attributes'])
+ json_dict['entries'].append(entry)
+
+ if str is bytes: # Python 2
+ check_json_dict(json_dict)
+
+ json_output = json.dumps(json_dict, ensure_ascii=True, sort_keys=sort, indent=indent, check_circular=True, default=format_json, separators=(',', ': '))
+
+ if log_enabled(BASIC):
+ log(BASIC, 'building JSON output <%s> for <%s>', json_output, self)
+ if stream:
+ stream.write(json_output)
+
+ return json_output
+
+ def response_to_file(self,
+ target,
+ raw=False,
+ indent=4,
+ sort=True):
+ with self.connection_lock:
+ if self.response:
+ if isinstance(target, STRING_TYPES):
+ target = open(target, 'w+')
+
+ if log_enabled(BASIC):
+ log(BASIC, 'writing response to file for <%s>', self)
+
+ target.writelines(self.response_to_json(raw=raw, indent=indent, sort=sort))
+ target.close()
+
+ def _fire_deferred(self, read_info=True):
+ with self.connection_lock:
+ if self.lazy and not self._executing_deferred:
+ self._executing_deferred = True
+
+ if log_enabled(BASIC):
+ log(BASIC, 'executing deferred (open: %s, start_tls: %s, bind: %s) for <%s>', self._deferred_open, self._deferred_start_tls, self._deferred_bind, self)
+ try:
+ if self._deferred_open:
+ self.open(read_server_info=False)
+ if self._deferred_start_tls:
+ self.start_tls(read_server_info=False)
+ if self._deferred_bind:
+ self.bind(read_server_info=False, controls=self._bind_controls)
+ if read_info:
+ self.refresh_server_info()
+ except LDAPExceptionError as e:
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', e, self)
+ raise # re-raise LDAPExceptionError
+ finally:
+ self._executing_deferred = False
+
+ @property
+ def entries(self):
+ if self.response:
+ if not self._entries:
+ self._entries = self._get_entries(self.response)
+ return self._entries
+
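+ # Illustrative sketch of consuming search results through the entries property
+ # and the response_to_json helper defined above (base DN, filter and attribute
+ # names are hypothetical):
+ #
+ # conn.search('dc=example,dc=com', '(objectclass=person)', attributes=['cn', 'mail'])
+ # for entry in conn.entries:          # abstraction layer Entry objects
+ #     print(entry.entry_dn, entry.mail)
+ # print(conn.response_to_json())      # same result serialized as JSON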
+ def _get_entries(self, search_response):
+ with self.connection_lock:
+ from .. import ObjectDef, Reader
+
+ # build a table of ObjectDefs, grouping the entries found in search_response by their attribute sets; a subset will be included in its superset
+ attr_sets = []
+ for response in search_response:
+ if response['type'] == 'searchResEntry':
+ resp_attr_set = set(response['attributes'].keys())
+ if resp_attr_set not in attr_sets:
+ attr_sets.append(resp_attr_set)
+ attr_sets.sort(key=lambda x: -len(x)) # sorts the list in descending length order
+ unique_attr_sets = []
+ for attr_set in attr_sets:
+ for unique_set in unique_attr_sets:
+ if unique_set >= attr_set: # checks if unique set is a superset of attr_set
+ break
+ else: # the attr_set is not a subset of any element in unique_attr_sets
+ unique_attr_sets.append(attr_set)
+ object_defs = []
+ for attr_set in unique_attr_sets:
+ object_def = ObjectDef(schema=self.server.schema)
+ object_def += list(attr_set) # converts the set in a list to be added to the object definition
+ object_defs.append((attr_set,
+ object_def,
+ Reader(self, object_def, self.request['base'], self.request['filter'], attributes=attr_set) if self.strategy.sync else Reader(self, object_def, '', '', attributes=attr_set))
+ ) # objects_defs contains a tuple with the set, the ObjectDef and a cursor
+
+ entries = []
+ for response in search_response:
+ if response['type'] == 'searchResEntry':
+ resp_attr_set = set(response['attributes'].keys())
+ for object_def in object_defs:
+ if resp_attr_set <= object_def[0]: # finds the ObjectDef for the attribute set of this entry
+ entry = object_def[2]._create_entry(response)
+ entries.append(entry)
+ break
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'attribute set not found for %s in <%s>', resp_attr_set, self)
+ raise LDAPObjectError('attribute set not found for ' + str(resp_attr_set))
+
+ return entries
diff --git a/ldap3/core/exceptions.py b/ldap3/core/exceptions.py
index 81f1696..29aed26 100644
--- a/ldap3/core/exceptions.py
+++ b/ldap3/core/exceptions.py
@@ -1,597 +1,609 @@
-"""
-"""
-
-# Created on 2014.05.14
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from os import sep
-from .results import RESULT_OPERATIONS_ERROR, RESULT_PROTOCOL_ERROR, RESULT_TIME_LIMIT_EXCEEDED, RESULT_SIZE_LIMIT_EXCEEDED, \
- RESULT_STRONGER_AUTH_REQUIRED, RESULT_REFERRAL, RESULT_ADMIN_LIMIT_EXCEEDED, RESULT_UNAVAILABLE_CRITICAL_EXTENSION, \
- RESULT_AUTH_METHOD_NOT_SUPPORTED, RESULT_UNDEFINED_ATTRIBUTE_TYPE, RESULT_NO_SUCH_ATTRIBUTE, \
- RESULT_SASL_BIND_IN_PROGRESS, RESULT_CONFIDENTIALITY_REQUIRED, RESULT_INAPPROPRIATE_MATCHING, \
- RESULT_CONSTRAINT_VIOLATION, \
- RESULT_ATTRIBUTE_OR_VALUE_EXISTS, RESULT_INVALID_ATTRIBUTE_SYNTAX, RESULT_NO_SUCH_OBJECT, RESULT_ALIAS_PROBLEM, \
- RESULT_INVALID_DN_SYNTAX, RESULT_ALIAS_DEREFERENCING_PROBLEM, RESULT_INVALID_CREDENTIALS, RESULT_LOOP_DETECTED, \
- RESULT_ENTRY_ALREADY_EXISTS, RESULT_LCUP_SECURITY_VIOLATION, RESULT_CANCELED, RESULT_E_SYNC_REFRESH_REQUIRED, \
- RESULT_NO_SUCH_OPERATION, RESULT_LCUP_INVALID_DATA, RESULT_OBJECT_CLASS_MODS_PROHIBITED, RESULT_NAMING_VIOLATION, \
- RESULT_INSUFFICIENT_ACCESS_RIGHTS, RESULT_OBJECT_CLASS_VIOLATION, RESULT_TOO_LATE, RESULT_CANNOT_CANCEL, \
- RESULT_LCUP_UNSUPPORTED_SCHEME, RESULT_BUSY, RESULT_AFFECT_MULTIPLE_DSAS, RESULT_UNAVAILABLE, \
- RESULT_NOT_ALLOWED_ON_NON_LEAF, \
- RESULT_UNWILLING_TO_PERFORM, RESULT_OTHER, RESULT_LCUP_RELOAD_REQUIRED, RESULT_ASSERTION_FAILED, \
- RESULT_AUTHORIZATION_DENIED, RESULT_LCUP_RESOURCES_EXHAUSTED, RESULT_NOT_ALLOWED_ON_RDN, \
- RESULT_INAPPROPRIATE_AUTHENTICATION
-import socket
-
-
-# LDAPException hierarchy
-class LDAPException(Exception):
- pass
-
-
-class LDAPOperationResult(LDAPException):
- def __new__(cls, result=None, description=None, dn=None, message=None, response_type=None, response=None):
- if cls is LDAPOperationResult and result and result in exception_table:
- exc = super(LDAPOperationResult, exception_table[result]).__new__(
- exception_table[result]) # create an exception of the required result error
- exc.result = result
- exc.description = description
- exc.dn = dn
- exc.message = message
- exc.type = response_type
- exc.response = response
- else:
- exc = super(LDAPOperationResult, cls).__new__(cls)
- return exc
-
- def __init__(self, result=None, description=None, dn=None, message=None, response_type=None, response=None):
- self.result = result
- self.description = description
- self.dn = dn
- self.message = message
- self.type = response_type
- self.response = response
-
- def __str__(self):
- s = [self.__class__.__name__,
- str(self.result) if self.result else None,
- self.description if self.description else None,
- self.dn if self.dn else None,
- self.message if self.message else None,
- self.type if self.type else None,
- self.response if self.response else None]
-
- return ' - '.join([str(item) for item in s if s is not None])
-
- def __repr__(self):
- return self.__str__()
-
-
-class LDAPOperationsErrorResult(LDAPOperationResult):
- pass
-
-
-class LDAPProtocolErrorResult(LDAPOperationResult):
- pass
-
-
-class LDAPTimeLimitExceededResult(LDAPOperationResult):
- pass
-
-
-class LDAPSizeLimitExceededResult(LDAPOperationResult):
- pass
-
-
-class LDAPAuthMethodNotSupportedResult(LDAPOperationResult):
- pass
-
-
-class LDAPStrongerAuthRequiredResult(LDAPOperationResult):
- pass
-
-
-class LDAPReferralResult(LDAPOperationResult):
- pass
-
-
-class LDAPAdminLimitExceededResult(LDAPOperationResult):
- pass
-
-
-class LDAPUnavailableCriticalExtensionResult(LDAPOperationResult):
- pass
-
-
-class LDAPConfidentialityRequiredResult(LDAPOperationResult):
- pass
-
-
-class LDAPSASLBindInProgressResult(LDAPOperationResult):
- pass
-
-
-class LDAPNoSuchAttributeResult(LDAPOperationResult):
- pass
-
-
-class LDAPUndefinedAttributeTypeResult(LDAPOperationResult):
- pass
-
-
-class LDAPInappropriateMatchingResult(LDAPOperationResult):
- pass
-
-
-class LDAPConstraintViolationResult(LDAPOperationResult):
- pass
-
-
-class LDAPAttributeOrValueExistsResult(LDAPOperationResult):
- pass
-
-
-class LDAPInvalidAttributeSyntaxResult(LDAPOperationResult):
- pass
-
-
-class LDAPNoSuchObjectResult(LDAPOperationResult):
- pass
-
-
-class LDAPAliasProblemResult(LDAPOperationResult):
- pass
-
-
-class LDAPInvalidDNSyntaxResult(LDAPOperationResult):
- pass
-
-
-class LDAPAliasDereferencingProblemResult(LDAPOperationResult):
- pass
-
-
-class LDAPInappropriateAuthenticationResult(LDAPOperationResult):
- pass
-
-
-class LDAPInvalidCredentialsResult(LDAPOperationResult):
- pass
-
-
-class LDAPInsufficientAccessRightsResult(LDAPOperationResult):
- pass
-
-
-class LDAPBusyResult(LDAPOperationResult):
- pass
-
-
-class LDAPUnavailableResult(LDAPOperationResult):
- pass
-
-
-class LDAPUnwillingToPerformResult(LDAPOperationResult):
- pass
-
-
-class LDAPLoopDetectedResult(LDAPOperationResult):
- pass
-
-
-class LDAPNamingViolationResult(LDAPOperationResult):
- pass
-
-
-class LDAPObjectClassViolationResult(LDAPOperationResult):
- pass
-
-
-class LDAPNotAllowedOnNotLeafResult(LDAPOperationResult):
- pass
-
-
-class LDAPNotAllowedOnRDNResult(LDAPOperationResult):
- pass
-
-
-class LDAPEntryAlreadyExistsResult(LDAPOperationResult):
- pass
-
-
-class LDAPObjectClassModsProhibitedResult(LDAPOperationResult):
- pass
-
-
-class LDAPAffectMultipleDSASResult(LDAPOperationResult):
- pass
-
-
-class LDAPOtherResult(LDAPOperationResult):
- pass
-
-
-class LDAPLCUPResourcesExhaustedResult(LDAPOperationResult):
- pass
-
-
-class LDAPLCUPSecurityViolationResult(LDAPOperationResult):
- pass
-
-
-class LDAPLCUPInvalidDataResult(LDAPOperationResult):
- pass
-
-
-class LDAPLCUPUnsupportedSchemeResult(LDAPOperationResult):
- pass
-
-
-class LDAPLCUPReloadRequiredResult(LDAPOperationResult):
- pass
-
-
-class LDAPCanceledResult(LDAPOperationResult):
- pass
-
-
-class LDAPNoSuchOperationResult(LDAPOperationResult):
- pass
-
-
-class LDAPTooLateResult(LDAPOperationResult):
- pass
-
-
-class LDAPCannotCancelResult(LDAPOperationResult):
- pass
-
-
-class LDAPAssertionFailedResult(LDAPOperationResult):
- pass
-
-
-class LDAPAuthorizationDeniedResult(LDAPOperationResult):
- pass
-
-
-class LDAPESyncRefreshRequiredResult(LDAPOperationResult):
- pass
-
-
-exception_table = {RESULT_OPERATIONS_ERROR: LDAPOperationsErrorResult,
- RESULT_PROTOCOL_ERROR: LDAPProtocolErrorResult,
- RESULT_TIME_LIMIT_EXCEEDED: LDAPTimeLimitExceededResult,
- RESULT_SIZE_LIMIT_EXCEEDED: LDAPSizeLimitExceededResult,
- RESULT_AUTH_METHOD_NOT_SUPPORTED: LDAPAuthMethodNotSupportedResult,
- RESULT_STRONGER_AUTH_REQUIRED: LDAPStrongerAuthRequiredResult,
- RESULT_REFERRAL: LDAPReferralResult,
- RESULT_ADMIN_LIMIT_EXCEEDED: LDAPAdminLimitExceededResult,
- RESULT_UNAVAILABLE_CRITICAL_EXTENSION: LDAPUnavailableCriticalExtensionResult,
- RESULT_CONFIDENTIALITY_REQUIRED: LDAPConfidentialityRequiredResult,
- RESULT_SASL_BIND_IN_PROGRESS: LDAPSASLBindInProgressResult,
- RESULT_NO_SUCH_ATTRIBUTE: LDAPNoSuchAttributeResult,
- RESULT_UNDEFINED_ATTRIBUTE_TYPE: LDAPUndefinedAttributeTypeResult,
- RESULT_INAPPROPRIATE_MATCHING: LDAPInappropriateMatchingResult,
- RESULT_CONSTRAINT_VIOLATION: LDAPConstraintViolationResult,
- RESULT_ATTRIBUTE_OR_VALUE_EXISTS: LDAPAttributeOrValueExistsResult,
- RESULT_INVALID_ATTRIBUTE_SYNTAX: LDAPInvalidAttributeSyntaxResult,
- RESULT_NO_SUCH_OBJECT: LDAPNoSuchObjectResult,
- RESULT_ALIAS_PROBLEM: LDAPAliasProblemResult,
- RESULT_INVALID_DN_SYNTAX: LDAPInvalidDNSyntaxResult,
- RESULT_ALIAS_DEREFERENCING_PROBLEM: LDAPAliasDereferencingProblemResult,
- RESULT_INAPPROPRIATE_AUTHENTICATION: LDAPInappropriateAuthenticationResult,
- RESULT_INVALID_CREDENTIALS: LDAPInvalidCredentialsResult,
- RESULT_INSUFFICIENT_ACCESS_RIGHTS: LDAPInsufficientAccessRightsResult,
- RESULT_BUSY: LDAPBusyResult,
- RESULT_UNAVAILABLE: LDAPUnavailableResult,
- RESULT_UNWILLING_TO_PERFORM: LDAPUnwillingToPerformResult,
- RESULT_LOOP_DETECTED: LDAPLoopDetectedResult,
- RESULT_NAMING_VIOLATION: LDAPNamingViolationResult,
- RESULT_OBJECT_CLASS_VIOLATION: LDAPObjectClassViolationResult,
- RESULT_NOT_ALLOWED_ON_NON_LEAF: LDAPNotAllowedOnNotLeafResult,
- RESULT_NOT_ALLOWED_ON_RDN: LDAPNotAllowedOnRDNResult,
- RESULT_ENTRY_ALREADY_EXISTS: LDAPEntryAlreadyExistsResult,
- RESULT_OBJECT_CLASS_MODS_PROHIBITED: LDAPObjectClassModsProhibitedResult,
- RESULT_AFFECT_MULTIPLE_DSAS: LDAPAffectMultipleDSASResult,
- RESULT_OTHER: LDAPOtherResult,
- RESULT_LCUP_RESOURCES_EXHAUSTED: LDAPLCUPResourcesExhaustedResult,
- RESULT_LCUP_SECURITY_VIOLATION: LDAPLCUPSecurityViolationResult,
- RESULT_LCUP_INVALID_DATA: LDAPLCUPInvalidDataResult,
- RESULT_LCUP_UNSUPPORTED_SCHEME: LDAPLCUPUnsupportedSchemeResult,
- RESULT_LCUP_RELOAD_REQUIRED: LDAPLCUPReloadRequiredResult,
- RESULT_CANCELED: LDAPCanceledResult,
- RESULT_NO_SUCH_OPERATION: LDAPNoSuchOperationResult,
- RESULT_TOO_LATE: LDAPTooLateResult,
- RESULT_CANNOT_CANCEL: LDAPCannotCancelResult,
- RESULT_ASSERTION_FAILED: LDAPAssertionFailedResult,
- RESULT_AUTHORIZATION_DENIED: LDAPAuthorizationDeniedResult,
- RESULT_E_SYNC_REFRESH_REQUIRED: LDAPESyncRefreshRequiredResult}
-
-
-class LDAPExceptionError(LDAPException):
- pass
-
-
-# configuration exceptions
-class LDAPConfigurationError(LDAPExceptionError):
- pass
-
-
-class LDAPUnknownStrategyError(LDAPConfigurationError):
- pass
-
-
-class LDAPUnknownAuthenticationMethodError(LDAPConfigurationError):
- pass
-
-
-class LDAPSSLConfigurationError(LDAPConfigurationError):
- pass
-
-
-class LDAPDefinitionError(LDAPConfigurationError):
- pass
-
-
-class LDAPPackageUnavailableError(LDAPConfigurationError, ImportError):
- pass
-
-
-class LDAPConfigurationParameterError(LDAPConfigurationError):
- pass
-
-
-# abstract layer exceptions
-class LDAPKeyError(LDAPExceptionError, KeyError, AttributeError):
- pass
-
-
-class LDAPObjectError(LDAPExceptionError, ValueError):
- pass
-
-
-class LDAPAttributeError(LDAPExceptionError, ValueError, TypeError):
- pass
-
-
-class LDAPCursorError(LDAPExceptionError):
- pass
-
-class LDAPObjectDereferenceError(LDAPExceptionError):
- pass
-
-# security exceptions
-class LDAPSSLNotSupportedError(LDAPExceptionError, ImportError):
- pass
-
-
-class LDAPInvalidTlsSpecificationError(LDAPExceptionError):
- pass
-
-
-class LDAPInvalidHashAlgorithmError(LDAPExceptionError, ValueError):
- pass
-
-
-# connection exceptions
-class LDAPBindError(LDAPExceptionError):
- pass
-
-
-class LDAPInvalidServerError(LDAPExceptionError):
- pass
-
-
-class LDAPSASLMechanismNotSupportedError(LDAPExceptionError):
- pass
-
-
-class LDAPConnectionIsReadOnlyError(LDAPExceptionError):
- pass
-
-
-class LDAPChangeError(LDAPExceptionError, ValueError):
- pass
-
-
-class LDAPServerPoolError(LDAPExceptionError):
- pass
-
-
-class LDAPServerPoolExhaustedError(LDAPExceptionError):
- pass
-
-
-class LDAPInvalidPortError(LDAPExceptionError):
- pass
-
-
-class LDAPStartTLSError(LDAPExceptionError):
- pass
-
-
-class LDAPCertificateError(LDAPExceptionError):
- pass
-
-
-class LDAPUserNameNotAllowedError(LDAPExceptionError):
- pass
-
-
-class LDAPUserNameIsMandatoryError(LDAPExceptionError):
- pass
-
-
-class LDAPPasswordIsMandatoryError(LDAPExceptionError):
- pass
-
-
-class LDAPInvalidFilterError(LDAPExceptionError):
- pass
-
-
-class LDAPInvalidScopeError(LDAPExceptionError, ValueError):
- pass
-
-
-class LDAPInvalidDereferenceAliasesError(LDAPExceptionError, ValueError):
- pass
-
-
-class LDAPInvalidValueError(LDAPExceptionError, ValueError):
- pass
-
-
-class LDAPControlError(LDAPExceptionError, ValueError):
- pass
-
-
-class LDAPExtensionError(LDAPExceptionError, ValueError):
- pass
-
-
-class LDAPLDIFError(LDAPExceptionError):
- pass
-
-
-class LDAPSchemaError(LDAPExceptionError):
- pass
-
-
-class LDAPSASLPrepError(LDAPExceptionError):
- pass
-
-
-class LDAPSASLBindInProgressError(LDAPExceptionError):
- pass
-
-
-class LDAPMetricsError(LDAPExceptionError):
- pass
-
-
-class LDAPObjectClassError(LDAPExceptionError):
- pass
-
-
-class LDAPInvalidDnError(LDAPExceptionError):
- pass
-
-
-class LDAPResponseTimeoutError(LDAPExceptionError):
- pass
-
-
-class LDAPTransactionError(LDAPExceptionError):
- pass
-
-
-# communication exceptions
-class LDAPCommunicationError(LDAPExceptionError):
- pass
-
-
-class LDAPSocketOpenError(LDAPCommunicationError):
- pass
-
-
-class LDAPSocketCloseError(LDAPCommunicationError):
- pass
-
-
-class LDAPSocketReceiveError(LDAPCommunicationError, socket.error):
- pass
-
-
-class LDAPSocketSendError(LDAPCommunicationError, socket.error):
- pass
-
-
-class LDAPSessionTerminatedByServerError(LDAPCommunicationError):
- pass
-
-
-class LDAPUnknownResponseError(LDAPCommunicationError):
- pass
-
-
-class LDAPUnknownRequestError(LDAPCommunicationError):
- pass
-
-
-class LDAPReferralError(LDAPCommunicationError):
- pass
-
-
-# pooling exceptions
-class LDAPConnectionPoolNameIsMandatoryError(LDAPExceptionError):
- pass
-
-
-class LDAPConnectionPoolNotStartedError(LDAPExceptionError):
- pass
-
-
-# restartable strategy
-class LDAPMaximumRetriesError(LDAPExceptionError):
- def __str__(self):
- s = []
- if self.args:
- if isinstance(self.args, tuple):
- if len(self.args) > 0:
- s.append('LDAPMaximumRetriesError: ' + str(self.args[0]))
- if len(self.args) > 1:
- s.append('Exception history:')
- prev_exc = ''
- for i, exc in enumerate(self.args[1]): # args[1] contains exception history
- if str(exc[1]) != prev_exc:
- s.append((str(i).rjust(5) + ' ' + str(exc[0]) + ': ' + str(exc[1]) + ' - ' + str(exc[2])))
- prev_exc = str(exc[1])
-
- if len(self.args) > 2:
- s.append('Maximum number of retries reached: ' + str(self.args[2]))
- else:
- s = [LDAPExceptionError.__str__(self)]
-
- return sep.join(s)
-
-
-# exception factories
-def communication_exception_factory(exc_to_raise, exc):
- """
- Generates a new exception class of the requested type (subclass of LDAPCommunication) merged with the exception raised by the interpreter
- """
- if exc_to_raise.__name__ in [cls.__name__ for cls in LDAPCommunicationError.__subclasses__()]:
- return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict())
- else:
- raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise))
-
-
-def start_tls_exception_factory(exc_to_raise, exc):
- """
- Generates a new exception class of the requested type (subclass of LDAPCommunication) merged with the exception raised by the interpreter
- """
-
- if exc_to_raise.__name__ == 'LDAPStartTLSError':
- return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict())
- else:
- raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise))
+"""
+"""
+
+# Created on 2014.05.14
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from os import sep
+from .results import RESULT_OPERATIONS_ERROR, RESULT_PROTOCOL_ERROR, RESULT_TIME_LIMIT_EXCEEDED, RESULT_SIZE_LIMIT_EXCEEDED, \
+ RESULT_STRONGER_AUTH_REQUIRED, RESULT_REFERRAL, RESULT_ADMIN_LIMIT_EXCEEDED, RESULT_UNAVAILABLE_CRITICAL_EXTENSION, \
+ RESULT_AUTH_METHOD_NOT_SUPPORTED, RESULT_UNDEFINED_ATTRIBUTE_TYPE, RESULT_NO_SUCH_ATTRIBUTE, \
+ RESULT_SASL_BIND_IN_PROGRESS, RESULT_CONFIDENTIALITY_REQUIRED, RESULT_INAPPROPRIATE_MATCHING, \
+ RESULT_CONSTRAINT_VIOLATION, \
+ RESULT_ATTRIBUTE_OR_VALUE_EXISTS, RESULT_INVALID_ATTRIBUTE_SYNTAX, RESULT_NO_SUCH_OBJECT, RESULT_ALIAS_PROBLEM, \
+ RESULT_INVALID_DN_SYNTAX, RESULT_ALIAS_DEREFERENCING_PROBLEM, RESULT_INVALID_CREDENTIALS, RESULT_LOOP_DETECTED, \
+ RESULT_ENTRY_ALREADY_EXISTS, RESULT_LCUP_SECURITY_VIOLATION, RESULT_CANCELED, RESULT_E_SYNC_REFRESH_REQUIRED, \
+ RESULT_NO_SUCH_OPERATION, RESULT_LCUP_INVALID_DATA, RESULT_OBJECT_CLASS_MODS_PROHIBITED, RESULT_NAMING_VIOLATION, \
+ RESULT_INSUFFICIENT_ACCESS_RIGHTS, RESULT_OBJECT_CLASS_VIOLATION, RESULT_TOO_LATE, RESULT_CANNOT_CANCEL, \
+ RESULT_LCUP_UNSUPPORTED_SCHEME, RESULT_BUSY, RESULT_AFFECT_MULTIPLE_DSAS, RESULT_UNAVAILABLE, \
+ RESULT_NOT_ALLOWED_ON_NON_LEAF, \
+ RESULT_UNWILLING_TO_PERFORM, RESULT_OTHER, RESULT_LCUP_RELOAD_REQUIRED, RESULT_ASSERTION_FAILED, \
+ RESULT_AUTHORIZATION_DENIED, RESULT_LCUP_RESOURCES_EXHAUSTED, RESULT_NOT_ALLOWED_ON_RDN, \
+ RESULT_INAPPROPRIATE_AUTHENTICATION
+import socket
+
+
+# LDAPException hierarchy
+class LDAPException(Exception):
+ pass
+
+
+class LDAPOperationResult(LDAPException):
+ def __new__(cls, result=None, description=None, dn=None, message=None, response_type=None, response=None):
+ if cls is LDAPOperationResult and result and result in exception_table:
+ exc = super(LDAPOperationResult, exception_table[result]).__new__(
+ exception_table[result]) # create an exception of the required result error
+ exc.result = result
+ exc.description = description
+ exc.dn = dn
+ exc.message = message
+ exc.type = response_type
+ exc.response = response
+ else:
+ exc = super(LDAPOperationResult, cls).__new__(cls)
+ return exc
+
+ def __init__(self, result=None, description=None, dn=None, message=None, response_type=None, response=None):
+ self.result = result
+ self.description = description
+ self.dn = dn
+ self.message = message
+ self.type = response_type
+ self.response = response
+
+ def __str__(self):
+ s = [self.__class__.__name__,
+ str(self.result) if self.result else None,
+ self.description if self.description else None,
+ self.dn if self.dn else None,
+ self.message if self.message else None,
+ self.type if self.type else None,
+ self.response if self.response else None]
+
+ return ' - '.join([str(item) for item in s if item is not None])
+
+ def __repr__(self):
+ return self.__str__()
+
+
+class LDAPOperationsErrorResult(LDAPOperationResult):
+ pass
+
+
+class LDAPProtocolErrorResult(LDAPOperationResult):
+ pass
+
+
+class LDAPTimeLimitExceededResult(LDAPOperationResult):
+ pass
+
+
+class LDAPSizeLimitExceededResult(LDAPOperationResult):
+ pass
+
+
+class LDAPAuthMethodNotSupportedResult(LDAPOperationResult):
+ pass
+
+
+class LDAPStrongerAuthRequiredResult(LDAPOperationResult):
+ pass
+
+
+class LDAPReferralResult(LDAPOperationResult):
+ pass
+
+
+class LDAPAdminLimitExceededResult(LDAPOperationResult):
+ pass
+
+
+class LDAPUnavailableCriticalExtensionResult(LDAPOperationResult):
+ pass
+
+
+class LDAPConfidentialityRequiredResult(LDAPOperationResult):
+ pass
+
+
+class LDAPSASLBindInProgressResult(LDAPOperationResult):
+ pass
+
+
+class LDAPNoSuchAttributeResult(LDAPOperationResult):
+ pass
+
+
+class LDAPUndefinedAttributeTypeResult(LDAPOperationResult):
+ pass
+
+
+class LDAPInappropriateMatchingResult(LDAPOperationResult):
+ pass
+
+
+class LDAPConstraintViolationResult(LDAPOperationResult):
+ pass
+
+
+class LDAPAttributeOrValueExistsResult(LDAPOperationResult):
+ pass
+
+
+class LDAPInvalidAttributeSyntaxResult(LDAPOperationResult):
+ pass
+
+
+class LDAPNoSuchObjectResult(LDAPOperationResult):
+ pass
+
+
+class LDAPAliasProblemResult(LDAPOperationResult):
+ pass
+
+
+class LDAPInvalidDNSyntaxResult(LDAPOperationResult):
+ pass
+
+
+class LDAPAliasDereferencingProblemResult(LDAPOperationResult):
+ pass
+
+
+class LDAPInappropriateAuthenticationResult(LDAPOperationResult):
+ pass
+
+
+class LDAPInvalidCredentialsResult(LDAPOperationResult):
+ pass
+
+
+class LDAPInsufficientAccessRightsResult(LDAPOperationResult):
+ pass
+
+
+class LDAPBusyResult(LDAPOperationResult):
+ pass
+
+
+class LDAPUnavailableResult(LDAPOperationResult):
+ pass
+
+
+class LDAPUnwillingToPerformResult(LDAPOperationResult):
+ pass
+
+
+class LDAPLoopDetectedResult(LDAPOperationResult):
+ pass
+
+
+class LDAPNamingViolationResult(LDAPOperationResult):
+ pass
+
+
+class LDAPObjectClassViolationResult(LDAPOperationResult):
+ pass
+
+
+class LDAPNotAllowedOnNotLeafResult(LDAPOperationResult):
+ pass
+
+
+class LDAPNotAllowedOnRDNResult(LDAPOperationResult):
+ pass
+
+
+class LDAPEntryAlreadyExistsResult(LDAPOperationResult):
+ pass
+
+
+class LDAPObjectClassModsProhibitedResult(LDAPOperationResult):
+ pass
+
+
+class LDAPAffectMultipleDSASResult(LDAPOperationResult):
+ pass
+
+
+class LDAPOtherResult(LDAPOperationResult):
+ pass
+
+
+class LDAPLCUPResourcesExhaustedResult(LDAPOperationResult):
+ pass
+
+
+class LDAPLCUPSecurityViolationResult(LDAPOperationResult):
+ pass
+
+
+class LDAPLCUPInvalidDataResult(LDAPOperationResult):
+ pass
+
+
+class LDAPLCUPUnsupportedSchemeResult(LDAPOperationResult):
+ pass
+
+
+class LDAPLCUPReloadRequiredResult(LDAPOperationResult):
+ pass
+
+
+class LDAPCanceledResult(LDAPOperationResult):
+ pass
+
+
+class LDAPNoSuchOperationResult(LDAPOperationResult):
+ pass
+
+
+class LDAPTooLateResult(LDAPOperationResult):
+ pass
+
+
+class LDAPCannotCancelResult(LDAPOperationResult):
+ pass
+
+
+class LDAPAssertionFailedResult(LDAPOperationResult):
+ pass
+
+
+class LDAPAuthorizationDeniedResult(LDAPOperationResult):
+ pass
+
+
+class LDAPESyncRefreshRequiredResult(LDAPOperationResult):
+ pass
+
+
+exception_table = {RESULT_OPERATIONS_ERROR: LDAPOperationsErrorResult,
+ RESULT_PROTOCOL_ERROR: LDAPProtocolErrorResult,
+ RESULT_TIME_LIMIT_EXCEEDED: LDAPTimeLimitExceededResult,
+ RESULT_SIZE_LIMIT_EXCEEDED: LDAPSizeLimitExceededResult,
+ RESULT_AUTH_METHOD_NOT_SUPPORTED: LDAPAuthMethodNotSupportedResult,
+ RESULT_STRONGER_AUTH_REQUIRED: LDAPStrongerAuthRequiredResult,
+ RESULT_REFERRAL: LDAPReferralResult,
+ RESULT_ADMIN_LIMIT_EXCEEDED: LDAPAdminLimitExceededResult,
+ RESULT_UNAVAILABLE_CRITICAL_EXTENSION: LDAPUnavailableCriticalExtensionResult,
+ RESULT_CONFIDENTIALITY_REQUIRED: LDAPConfidentialityRequiredResult,
+ RESULT_SASL_BIND_IN_PROGRESS: LDAPSASLBindInProgressResult,
+ RESULT_NO_SUCH_ATTRIBUTE: LDAPNoSuchAttributeResult,
+ RESULT_UNDEFINED_ATTRIBUTE_TYPE: LDAPUndefinedAttributeTypeResult,
+ RESULT_INAPPROPRIATE_MATCHING: LDAPInappropriateMatchingResult,
+ RESULT_CONSTRAINT_VIOLATION: LDAPConstraintViolationResult,
+ RESULT_ATTRIBUTE_OR_VALUE_EXISTS: LDAPAttributeOrValueExistsResult,
+ RESULT_INVALID_ATTRIBUTE_SYNTAX: LDAPInvalidAttributeSyntaxResult,
+ RESULT_NO_SUCH_OBJECT: LDAPNoSuchObjectResult,
+ RESULT_ALIAS_PROBLEM: LDAPAliasProblemResult,
+ RESULT_INVALID_DN_SYNTAX: LDAPInvalidDNSyntaxResult,
+ RESULT_ALIAS_DEREFERENCING_PROBLEM: LDAPAliasDereferencingProblemResult,
+ RESULT_INAPPROPRIATE_AUTHENTICATION: LDAPInappropriateAuthenticationResult,
+ RESULT_INVALID_CREDENTIALS: LDAPInvalidCredentialsResult,
+ RESULT_INSUFFICIENT_ACCESS_RIGHTS: LDAPInsufficientAccessRightsResult,
+ RESULT_BUSY: LDAPBusyResult,
+ RESULT_UNAVAILABLE: LDAPUnavailableResult,
+ RESULT_UNWILLING_TO_PERFORM: LDAPUnwillingToPerformResult,
+ RESULT_LOOP_DETECTED: LDAPLoopDetectedResult,
+ RESULT_NAMING_VIOLATION: LDAPNamingViolationResult,
+ RESULT_OBJECT_CLASS_VIOLATION: LDAPObjectClassViolationResult,
+ RESULT_NOT_ALLOWED_ON_NON_LEAF: LDAPNotAllowedOnNotLeafResult,
+ RESULT_NOT_ALLOWED_ON_RDN: LDAPNotAllowedOnRDNResult,
+ RESULT_ENTRY_ALREADY_EXISTS: LDAPEntryAlreadyExistsResult,
+ RESULT_OBJECT_CLASS_MODS_PROHIBITED: LDAPObjectClassModsProhibitedResult,
+ RESULT_AFFECT_MULTIPLE_DSAS: LDAPAffectMultipleDSASResult,
+ RESULT_OTHER: LDAPOtherResult,
+ RESULT_LCUP_RESOURCES_EXHAUSTED: LDAPLCUPResourcesExhaustedResult,
+ RESULT_LCUP_SECURITY_VIOLATION: LDAPLCUPSecurityViolationResult,
+ RESULT_LCUP_INVALID_DATA: LDAPLCUPInvalidDataResult,
+ RESULT_LCUP_UNSUPPORTED_SCHEME: LDAPLCUPUnsupportedSchemeResult,
+ RESULT_LCUP_RELOAD_REQUIRED: LDAPLCUPReloadRequiredResult,
+ RESULT_CANCELED: LDAPCanceledResult,
+ RESULT_NO_SUCH_OPERATION: LDAPNoSuchOperationResult,
+ RESULT_TOO_LATE: LDAPTooLateResult,
+ RESULT_CANNOT_CANCEL: LDAPCannotCancelResult,
+ RESULT_ASSERTION_FAILED: LDAPAssertionFailedResult,
+ RESULT_AUTHORIZATION_DENIED: LDAPAuthorizationDeniedResult,
+ RESULT_E_SYNC_REFRESH_REQUIRED: LDAPESyncRefreshRequiredResult}
+
+
+class LDAPExceptionError(LDAPException):
+ pass
+
+
+# configuration exceptions
+class LDAPConfigurationError(LDAPExceptionError):
+ pass
+
+
+class LDAPUnknownStrategyError(LDAPConfigurationError):
+ pass
+
+
+class LDAPUnknownAuthenticationMethodError(LDAPConfigurationError):
+ pass
+
+
+class LDAPSSLConfigurationError(LDAPConfigurationError):
+ pass
+
+
+class LDAPDefinitionError(LDAPConfigurationError):
+ pass
+
+
+class LDAPPackageUnavailableError(LDAPConfigurationError, ImportError):
+ pass
+
+
+class LDAPConfigurationParameterError(LDAPConfigurationError):
+ pass
+
+
+# abstract layer exceptions
+class LDAPKeyError(LDAPExceptionError, KeyError, AttributeError):
+ pass
+
+
+class LDAPObjectError(LDAPExceptionError, ValueError):
+ pass
+
+
+class LDAPAttributeError(LDAPExceptionError, ValueError, TypeError):
+ pass
+
+
+class LDAPCursorError(LDAPExceptionError):
+ pass
+
+
+class LDAPCursorAttributeError(LDAPCursorError, AttributeError):
+ pass
+
+
+class LDAPObjectDereferenceError(LDAPExceptionError):
+ pass
+
+
+# security exceptions
+class LDAPSSLNotSupportedError(LDAPExceptionError, ImportError):
+ pass
+
+
+class LDAPInvalidTlsSpecificationError(LDAPExceptionError):
+ pass
+
+
+class LDAPInvalidHashAlgorithmError(LDAPExceptionError, ValueError):
+ pass
+
+
+# connection exceptions
+class LDAPBindError(LDAPExceptionError):
+ pass
+
+
+class LDAPInvalidServerError(LDAPExceptionError):
+ pass
+
+
+class LDAPSASLMechanismNotSupportedError(LDAPExceptionError):
+ pass
+
+
+class LDAPConnectionIsReadOnlyError(LDAPExceptionError):
+ pass
+
+
+class LDAPChangeError(LDAPExceptionError, ValueError):
+ pass
+
+
+class LDAPServerPoolError(LDAPExceptionError):
+ pass
+
+
+class LDAPServerPoolExhaustedError(LDAPExceptionError):
+ pass
+
+
+class LDAPInvalidPortError(LDAPExceptionError):
+ pass
+
+
+class LDAPStartTLSError(LDAPExceptionError):
+ pass
+
+
+class LDAPCertificateError(LDAPExceptionError):
+ pass
+
+
+class LDAPUserNameNotAllowedError(LDAPExceptionError):
+ pass
+
+
+class LDAPUserNameIsMandatoryError(LDAPExceptionError):
+ pass
+
+
+class LDAPPasswordIsMandatoryError(LDAPExceptionError):
+ pass
+
+
+class LDAPInvalidFilterError(LDAPExceptionError):
+ pass
+
+
+class LDAPInvalidScopeError(LDAPExceptionError, ValueError):
+ pass
+
+
+class LDAPInvalidDereferenceAliasesError(LDAPExceptionError, ValueError):
+ pass
+
+
+class LDAPInvalidValueError(LDAPExceptionError, ValueError):
+ pass
+
+
+class LDAPControlError(LDAPExceptionError, ValueError):
+ pass
+
+
+class LDAPExtensionError(LDAPExceptionError, ValueError):
+ pass
+
+
+class LDAPLDIFError(LDAPExceptionError):
+ pass
+
+
+class LDAPSchemaError(LDAPExceptionError):
+ pass
+
+
+class LDAPSASLPrepError(LDAPExceptionError):
+ pass
+
+
+class LDAPSASLBindInProgressError(LDAPExceptionError):
+ pass
+
+
+class LDAPMetricsError(LDAPExceptionError):
+ pass
+
+
+class LDAPObjectClassError(LDAPExceptionError):
+ pass
+
+
+class LDAPInvalidDnError(LDAPExceptionError):
+ pass
+
+
+class LDAPResponseTimeoutError(LDAPExceptionError):
+ pass
+
+
+class LDAPTransactionError(LDAPExceptionError):
+ pass
+
+
+class LDAPInfoError(LDAPExceptionError):
+ pass
+
+
+# communication exceptions
+class LDAPCommunicationError(LDAPExceptionError):
+ pass
+
+
+class LDAPSocketOpenError(LDAPCommunicationError):
+ pass
+
+
+class LDAPSocketCloseError(LDAPCommunicationError):
+ pass
+
+
+class LDAPSocketReceiveError(LDAPCommunicationError, socket.error):
+ pass
+
+
+class LDAPSocketSendError(LDAPCommunicationError, socket.error):
+ pass
+
+
+class LDAPSessionTerminatedByServerError(LDAPCommunicationError):
+ pass
+
+
+class LDAPUnknownResponseError(LDAPCommunicationError):
+ pass
+
+
+class LDAPUnknownRequestError(LDAPCommunicationError):
+ pass
+
+
+class LDAPReferralError(LDAPCommunicationError):
+ pass
+
+
+# pooling exceptions
+class LDAPConnectionPoolNameIsMandatoryError(LDAPExceptionError):
+ pass
+
+
+class LDAPConnectionPoolNotStartedError(LDAPExceptionError):
+ pass
+
+
+# restartable strategy
+class LDAPMaximumRetriesError(LDAPExceptionError):
+ def __str__(self):
+ s = []
+ if self.args:
+ if isinstance(self.args, tuple):
+ if len(self.args) > 0:
+ s.append('LDAPMaximumRetriesError: ' + str(self.args[0]))
+ if len(self.args) > 1:
+ s.append('Exception history:')
+ prev_exc = ''
+ for i, exc in enumerate(self.args[1]): # args[1] contains exception history
+ if str(exc) != prev_exc:
+ s.append((str(i).rjust(5) + ' ' + str(type(exc)) + ': ' + str(exc)))
+ prev_exc = str(exc)
+ if len(self.args) > 2:
+ s.append('Maximum number of retries reached: ' + str(self.args[2]))
+ else:
+ s = [LDAPExceptionError.__str__(self)]
+
+ return sep.join(s)
+
+
+# exception factories
+def communication_exception_factory(exc_to_raise, exc):
+ """
+ Generates a new exception class of the requested type (subclass of LDAPCommunicationError) merged with the exception raised by the interpreter
+ """
+ if exc_to_raise.__name__ in [cls.__name__ for cls in LDAPCommunicationError.__subclasses__()]:
+ return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict())
+ else:
+ raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise))
+
+
+def start_tls_exception_factory(exc_to_raise, exc):
+ """
+ Generates a new exception class of the requested type merged with the exception raised by the interpreter
+ """
+
+ if exc_to_raise.__name__ == 'LDAPStartTLSError':
+ return type(exc_to_raise.__name__, (exc_to_raise, type(exc)), dict())
+ else:
+ raise LDAPExceptionError('unable to generate exception type ' + str(exc_to_raise))
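
The exception_table dispatch in LDAPOperationResult.__new__ (carried over unchanged by this diff) means that instantiating the base class with a known result code yields the matching subclass, so connections opened with raise_exceptions=True can be guarded with narrow except clauses. A quick check of that behaviour, assuming only the names defined in this file and in ldap3.core.results:

    from ldap3.core.exceptions import LDAPOperationResult, LDAPNoSuchObjectResult
    from ldap3.core.results import RESULT_NO_SUCH_OBJECT

    exc = LDAPOperationResult(result=RESULT_NO_SUCH_OBJECT, description='noSuchObject')
    # __new__ looks the result code up in exception_table and returns the specific subclass
    assert isinstance(exc, LDAPNoSuchObjectResult)
    print(exc)  # prints something like: LDAPNoSuchObjectResult - 32 - noSuchObject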
diff --git a/ldap3/core/pooling.py b/ldap3/core/pooling.py
index 66a0bbd..24a5b0f 100644
--- a/ldap3/core/pooling.py
+++ b/ldap3/core/pooling.py
@@ -1,306 +1,329 @@
-"""
-"""
-
-# Created on 2014.03.14
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from datetime import datetime, MINYEAR
-from os import linesep
-from random import randint
-from time import sleep
-
-from .. import FIRST, ROUND_ROBIN, RANDOM, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
-from .exceptions import LDAPUnknownStrategyError, LDAPServerPoolError, LDAPServerPoolExhaustedError
-from .server import Server
-from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK
-
-POOLING_STRATEGIES = [FIRST, ROUND_ROBIN, RANDOM]
-
-
-class ServerPoolState(object):
- def __init__(self, server_pool):
- self.servers = [] # each element is a list: [server, last_checked_time, available]
- self.strategy = server_pool.strategy
- self.server_pool = server_pool
- self.last_used_server = 0
- self.refresh()
- self.initialize_time = datetime.now()
-
- if log_enabled(BASIC):
- log(BASIC, 'instantiated ServerPoolState: <%r>', self)
-
- def __str__(self):
- s = 'servers: ' + linesep
- if self.servers:
- for server in self.servers:
- s += str(server[0]) + linesep
- else:
- s += 'None' + linesep
- s += 'Pool strategy: ' + str(self.strategy) + linesep
- s += ' - Last used server: ' + ('None' if self.last_used_server == -1 else str(self.servers[self.last_used_server][0]))
-
- return s
-
- def refresh(self):
- self.servers = []
- for server in self.server_pool.servers:
- self.servers.append([server, datetime(MINYEAR, 1, 1), True]) # server, smallest date ever, supposed available
- self.last_used_server = randint(0, len(self.servers) - 1)
-
- def get_current_server(self):
- return self.servers[self.last_used_server][0]
-
- def get_server(self):
- if self.servers:
- if self.server_pool.strategy == FIRST:
- if self.server_pool.active:
- # returns the first active server
- self.last_used_server = self.find_active_server(starting=0)
- else:
- # returns always the first server - no pooling
- self.last_used_server = 0
- elif self.server_pool.strategy == ROUND_ROBIN:
- if self.server_pool.active:
- # returns the next active server in a circular range
- self.last_used_server = self.find_active_server(self.last_used_server + 1)
- else:
- # returns the next server in a circular range
- self.last_used_server = self.last_used_server + 1 if (self.last_used_server + 1) < len(self.servers) else 0
- elif self.server_pool.strategy == RANDOM:
- if self.server_pool.active:
- self.last_used_server = self.find_active_random_server()
- else:
- # returns a random server in the pool
- self.last_used_server = randint(0, len(self.servers) - 1)
- else:
- if log_enabled(ERROR):
- log(ERROR, 'unknown server pooling strategy <%s>', self.server_pool.strategy)
- raise LDAPUnknownStrategyError('unknown server pooling strategy')
- if log_enabled(BASIC):
- log(BASIC, 'server returned from Server Pool: <%s>', self.last_used_server)
- return self.servers[self.last_used_server][0]
- else:
- if log_enabled(ERROR):
- log(ERROR, 'no servers in Server Pool <%s>', self)
- raise LDAPServerPoolError('no servers in server pool')
-
- def find_active_random_server(self):
- counter = self.server_pool.active # can be True for "forever" or the number of cycles to try
- while counter:
- if log_enabled(NETWORK):
- log(NETWORK, 'entering loop for finding active server in pool <%s>', self)
- temp_list = self.servers[:] # copy
- while temp_list:
- # pops a random server from a temp list and checks its
- # availability, if not available tries another one
- server = temp_list.pop(randint(0, len(temp_list) - 1))
- if not server[2]: # server is offline
- if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - server[1]).seconds < self.server_pool.exhaust: # keeps server offline
- if log_enabled(NETWORK):
- log(NETWORK, 'server <%s> excluded from checking because it is offline', server[0])
- continue
- if log_enabled(NETWORK):
- log(NETWORK, 'server <%s> reinserted in pool', server[0])
- server[1] = datetime.now()
- if log_enabled(NETWORK):
- log(NETWORK, 'checking server <%s> for availability', server[0])
- if server[0].check_availability():
- # returns a random active server in the pool
- server[2] = True
- return self.servers.index(server)
- else:
- server[2] = False
- if not isinstance(self.server_pool.active, bool):
- counter -= 1
- if log_enabled(ERROR):
- log(ERROR, 'no random active server available in Server Pool <%s> after maximum number of tries', self)
- raise LDAPServerPoolExhaustedError('no random active server available in server pool after maximum number of tries')
-
- def find_active_server(self, starting):
- conf_pool_timeout = get_config_parameter('POOLING_LOOP_TIMEOUT')
- counter = self.server_pool.active # can be True for "forever" or the number of cycles to try
- if starting >= len(self.servers):
- starting = 0
-
- while counter:
- if log_enabled(NETWORK):
- log(NETWORK, 'entering loop number <%s> for finding active server in pool <%s>', counter, self)
- index = -1
- pool_size = len(self.servers)
- while index < pool_size - 1:
- index += 1
- offset = index + starting if index + starting < pool_size else index + starting - pool_size
- if not self.servers[offset][2]: # server is offline
- if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - self.servers[offset][1]).seconds < self.server_pool.exhaust: # keeps server offline
- if log_enabled(NETWORK):
- if isinstance(self.server_pool.exhaust, bool):
- log(NETWORK, 'server <%s> excluded from checking because is offline', self.servers[offset][0])
- else:
- log(NETWORK, 'server <%s> excluded from checking because is offline for %d seconds', self.servers[offset][0], (self.server_pool.exhaust - (datetime.now() - self.servers[offset][1]).seconds))
- continue
- if log_enabled(NETWORK):
- log(NETWORK, 'server <%s> reinserted in pool', self.servers[offset][0])
- self.servers[offset][1] = datetime.now()
- if log_enabled(NETWORK):
- log(NETWORK, 'checking server <%s> for availability', self.servers[offset][0])
- if self.servers[offset][0].check_availability():
- self.servers[offset][2] = True
- return offset
- else:
- self.servers[offset][2] = False # sets server offline
-
- if not isinstance(self.server_pool.active, bool):
- counter -= 1
- if log_enabled(NETWORK):
- log(NETWORK, 'waiting for %d seconds before retrying pool servers cycle', conf_pool_timeout)
- sleep(conf_pool_timeout)
-
- if log_enabled(ERROR):
- log(ERROR, 'no active server available in Server Pool <%s> after maximum number of tries', self)
- raise LDAPServerPoolExhaustedError('no active server available in server pool after maximum number of tries')
-
- def __len__(self):
- return len(self.servers)
-
-
-class ServerPool(object):
- def __init__(self,
- servers=None,
- pool_strategy=ROUND_ROBIN,
- active=True,
- exhaust=False):
-
- if pool_strategy not in POOLING_STRATEGIES:
- if log_enabled(ERROR):
- log(ERROR, 'unknown pooling strategy <%s>', pool_strategy)
- raise LDAPUnknownStrategyError('unknown pooling strategy')
- if exhaust and not active:
- if log_enabled(ERROR):
- log(ERROR, 'cannot instantiate pool with exhaust and not active')
- raise LDAPServerPoolError('pools can be exhausted only when checking for active servers')
- self.servers = []
- self.pool_states = dict()
- self.active = active
- self.exhaust = exhaust
- if isinstance(servers, SEQUENCE_TYPES + (Server, )):
- self.add(servers)
- elif isinstance(servers, STRING_TYPES):
- self.add(Server(servers))
- self.strategy = pool_strategy
-
- if log_enabled(BASIC):
- log(BASIC, 'instantiated ServerPool: <%r>', self)
-
- def __str__(self):
- s = 'servers: ' + linesep
- if self.servers:
- for server in self.servers:
- s += str(server) + linesep
- else:
- s += 'None' + linesep
- s += 'Pool strategy: ' + str(self.strategy)
- s += ' - ' + 'active: ' + (str(self.active) if self.active else 'False')
- s += ' - ' + 'exhaust pool: ' + (str(self.exhaust) if self.exhaust else 'False')
- return s
-
- def __repr__(self):
- r = 'ServerPool(servers='
- if self.servers:
- r += '['
- for server in self.servers:
- r += server.__repr__() + ', '
- r = r[:-2] + ']'
- else:
- r += 'None'
- r += ', pool_strategy={0.strategy!r}'.format(self)
- r += ', active={0.active!r}'.format(self)
- r += ', exhaust={0.exhaust!r}'.format(self)
- r += ')'
-
- return r
-
- def __len__(self):
- return len(self.servers)
-
- def __getitem__(self, item):
- return self.servers[item]
-
- def __iter__(self):
- return self.servers.__iter__()
-
- def add(self, servers):
- if isinstance(servers, Server):
- if servers not in self.servers:
- self.servers.append(servers)
- elif isinstance(servers, STRING_TYPES):
- self.servers.append(Server(servers))
- elif isinstance(servers, SEQUENCE_TYPES):
- for server in servers:
- if isinstance(server, Server):
- self.servers.append(server)
- elif isinstance(server, STRING_TYPES):
- self.servers.append(Server(server))
- else:
- if log_enabled(ERROR):
- log(ERROR, 'element must be a server in Server Pool <%s>', self)
- raise LDAPServerPoolError('server in ServerPool must be a Server')
- else:
- if log_enabled(ERROR):
- log(ERROR, 'server must be a Server of a list of Servers when adding to Server Pool <%s>', self)
- raise LDAPServerPoolError('server must be a Server or a list of Server')
-
- for connection in self.pool_states:
- # notifies connections using this pool to refresh
- self.pool_states[connection].refresh()
-
- def remove(self, server):
- if server in self.servers:
- self.servers.remove(server)
- else:
- if log_enabled(ERROR):
- log(ERROR, 'server %s to be removed not in Server Pool <%s>', server, self)
- raise LDAPServerPoolError('server not in server pool')
-
- for connection in self.pool_states:
- # notifies connections using this pool to refresh
- self.pool_states[connection].refresh()
-
- def initialize(self, connection):
- pool_state = ServerPoolState(self)
- # registers pool_state in ServerPool object
- self.pool_states[connection] = pool_state
-
- def get_server(self, connection):
- if connection in self.pool_states:
- return self.pool_states[connection].get_server()
- else:
- if log_enabled(ERROR):
- log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self)
- raise LDAPServerPoolError('connection not in ServerPoolState')
-
- def get_current_server(self, connection):
- if connection in self.pool_states:
- return self.pool_states[connection].get_current_server()
- else:
- if log_enabled(ERROR):
- log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self)
- raise LDAPServerPoolError('connection not in ServerPoolState')
+"""
+"""
+
+# Created on 2014.03.14
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from datetime import datetime, MINYEAR
+from os import linesep
+from random import randint
+from time import sleep
+
+from .. import FIRST, ROUND_ROBIN, RANDOM, SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
+from .exceptions import LDAPUnknownStrategyError, LDAPServerPoolError, LDAPServerPoolExhaustedError
+from .server import Server
+from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK
+
+POOLING_STRATEGIES = [FIRST, ROUND_ROBIN, RANDOM]
+
+
+class ServerState(object):
+ def __init__(self, server, last_checked_time, available):
+ self.server = server
+ self.last_checked_time = last_checked_time
+ self.available = available
+
+
+class ServerPoolState(object):
+ def __init__(self, server_pool):
+ self.server_states = [] # each element is a ServerState
+ self.strategy = server_pool.strategy
+ self.server_pool = server_pool
+ self.last_used_server = 0
+ self.refresh()
+ self.initialize_time = datetime.now()
+
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated ServerPoolState: <%r>', self)
+
+ def __str__(self):
+ s = 'servers: ' + linesep
+ if self.server_states:
+ for state in self.server_states:
+ s += str(state.server) + linesep
+ else:
+ s += 'None' + linesep
+ s += 'Pool strategy: ' + str(self.strategy) + linesep
+ s += ' - Last used server: ' + ('None' if self.last_used_server == -1 else str(self.server_states[self.last_used_server].server))
+
+ return s
+
+ def refresh(self):
+ self.server_states = []
+ for server in self.server_pool.servers:
+ self.server_states.append(ServerState(server, datetime(MINYEAR, 1, 1), True)) # server, smallest date ever, supposed available
+ self.last_used_server = randint(0, len(self.server_states) - 1)
+
+ def get_current_server(self):
+ return self.server_states[self.last_used_server].server
+
+ def get_server(self):
+ if self.server_states:
+ if self.server_pool.strategy == FIRST:
+ if self.server_pool.active:
+ # returns the first active server
+ self.last_used_server = self.find_active_server(starting=0)
+ else:
+ # always returns the first server - no pooling
+ self.last_used_server = 0
+ elif self.server_pool.strategy == ROUND_ROBIN:
+ if self.server_pool.active:
+ # returns the next active server in a circular range
+ self.last_used_server = self.find_active_server(self.last_used_server + 1)
+ else:
+ # returns the next server in a circular range
+ self.last_used_server = self.last_used_server + 1 if (self.last_used_server + 1) < len(self.server_states) else 0
+ elif self.server_pool.strategy == RANDOM:
+ if self.server_pool.active:
+ self.last_used_server = self.find_active_random_server()
+ else:
+ # returns a random server in the pool
+ self.last_used_server = randint(0, len(self.server_states) - 1)
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'unknown server pooling strategy <%s>', self.server_pool.strategy)
+ raise LDAPUnknownStrategyError('unknown server pooling strategy')
+ if log_enabled(BASIC):
+ log(BASIC, 'server returned from Server Pool: <%s>', self.last_used_server)
+ return self.server_states[self.last_used_server].server
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'no servers in Server Pool <%s>', self)
+ raise LDAPServerPoolError('no servers in server pool')
+
+ def find_active_random_server(self):
+ counter = self.server_pool.active # can be True for "forever" or the number of cycles to try
+ while counter:
+ if log_enabled(NETWORK):
+ log(NETWORK, 'entering loop for finding active server in pool <%s>', self)
+ temp_list = self.server_states[:] # copy
+ while temp_list:
+ # pops a random server from a temp list and checks its
+ # availability, if not available tries another one
+ server_state = temp_list.pop(randint(0, len(temp_list) - 1))
+ if not server_state.available: # server is offline
+ if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - server_state.last_checked_time).seconds < self.server_pool.exhaust: # keeps server offline
+ if log_enabled(NETWORK):
+ log(NETWORK, 'server <%s> excluded from checking because it is offline', server_state.server)
+ continue
+ if log_enabled(NETWORK):
+ log(NETWORK, 'server <%s> reinserted in pool', server_state.server)
+ server_state.last_checked_time = datetime.now()
+ if log_enabled(NETWORK):
+ log(NETWORK, 'checking server <%s> for availability', server_state.server)
+ if server_state.server.check_availability():
+ # returns a random active server in the pool
+ server_state.available = True
+ return self.server_states.index(server_state)
+ else:
+ server_state.available = False
+ if not isinstance(self.server_pool.active, bool):
+ counter -= 1
+ if log_enabled(ERROR):
+ log(ERROR, 'no random active server available in Server Pool <%s> after maximum number of tries', self)
+ raise LDAPServerPoolExhaustedError('no random active server available in server pool after maximum number of tries')
+
+ def find_active_server(self, starting):
+ conf_pool_timeout = get_config_parameter('POOLING_LOOP_TIMEOUT')
+ counter = self.server_pool.active # can be True for "forever" or the number of cycles to try
+ if starting >= len(self.server_states):
+ starting = 0
+
+ while counter:
+ if log_enabled(NETWORK):
+ log(NETWORK, 'entering loop number <%s> for finding active server in pool <%s>', counter, self)
+ index = -1
+ pool_size = len(self.server_states)
+ while index < pool_size - 1:
+ index += 1
+ offset = index + starting if index + starting < pool_size else index + starting - pool_size
+ server_state = self.server_states[offset]
+ if not server_state.available: # server is offline
+ if (isinstance(self.server_pool.exhaust, bool) and self.server_pool.exhaust) or (datetime.now() - server_state.last_checked_time).seconds < self.server_pool.exhaust: # keeps server offline
+ if log_enabled(NETWORK):
+ if isinstance(self.server_pool.exhaust, bool):
+ log(NETWORK, 'server <%s> excluded from checking because it is offline', server_state.server)
+ else:
+ log(NETWORK, 'server <%s> excluded from checking because it is offline for %d seconds', server_state.server, (self.server_pool.exhaust - (datetime.now() - server_state.last_checked_time).seconds))
+ continue
+ if log_enabled(NETWORK):
+ log(NETWORK, 'server <%s> reinserted in pool', server_state.server)
+ server_state.last_checked_time = datetime.now()
+ if log_enabled(NETWORK):
+ log(NETWORK, 'checking server <%s> for availability', server_state.server)
+ if server_state.server.check_availability():
+ server_state.available = True
+ return offset
+ else:
+ server_state.available = False # sets server offline
+
+ if not isinstance(self.server_pool.active, bool):
+ counter -= 1
+ if log_enabled(NETWORK):
+ log(NETWORK, 'waiting for %d seconds before retrying pool servers cycle', conf_pool_timeout)
+ sleep(conf_pool_timeout)
+
+ if log_enabled(ERROR):
+ log(ERROR, 'no active server available in Server Pool <%s> after maximum number of tries', self)
+ raise LDAPServerPoolExhaustedError('no active server available in server pool after maximum number of tries')
+
+ def __len__(self):
+ return len(self.server_states)
+
+
+class ServerPool(object):
+ def __init__(self,
+ servers=None,
+ pool_strategy=ROUND_ROBIN,
+ active=True,
+ exhaust=False,
+ single_state=True):
+
+ if pool_strategy not in POOLING_STRATEGIES:
+ if log_enabled(ERROR):
+ log(ERROR, 'unknown pooling strategy <%s>', pool_strategy)
+ raise LDAPUnknownStrategyError('unknown pooling strategy')
+ if exhaust and not active:
+ if log_enabled(ERROR):
+ log(ERROR, 'cannot instantiate pool with exhaust and not active')
+ raise LDAPServerPoolError('pools can be exhausted only when checking for active servers')
+ self.servers = []
+ self.pool_states = dict()
+ self.active = active
+ self.exhaust = exhaust
+ self.single = single_state
+ self._pool_state = None # used for storing the global state of the pool
+ if isinstance(servers, SEQUENCE_TYPES + (Server, )):
+ self.add(servers)
+ elif isinstance(servers, STRING_TYPES):
+ self.add(Server(servers))
+ self.strategy = pool_strategy
+
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated ServerPool: <%r>', self)
+
+ def __str__(self):
+ s = 'servers: ' + linesep
+ if self.servers:
+ for server in self.servers:
+ s += str(server) + linesep
+ else:
+ s += 'None' + linesep
+ s += 'Pool strategy: ' + str(self.strategy)
+ s += ' - ' + 'active: ' + (str(self.active) if self.active else 'False')
+ s += ' - ' + 'exhaust pool: ' + (str(self.exhaust) if self.exhaust else 'False')
+ return s
+
+ def __repr__(self):
+ r = 'ServerPool(servers='
+ if self.servers:
+ r += '['
+ for server in self.servers:
+ r += server.__repr__() + ', '
+ r = r[:-2] + ']'
+ else:
+ r += 'None'
+ r += ', pool_strategy={0.strategy!r}'.format(self)
+ r += ', active={0.active!r}'.format(self)
+ r += ', exhaust={0.exhaust!r}'.format(self)
+ r += ')'
+
+ return r
+
+ def __len__(self):
+ return len(self.servers)
+
+ def __getitem__(self, item):
+ return self.servers[item]
+
+ def __iter__(self):
+ return self.servers.__iter__()
+
+ def add(self, servers):
+ if isinstance(servers, Server):
+ if servers not in self.servers:
+ self.servers.append(servers)
+ elif isinstance(servers, STRING_TYPES):
+ self.servers.append(Server(servers))
+ elif isinstance(servers, SEQUENCE_TYPES):
+ for server in servers:
+ if isinstance(server, Server):
+ self.servers.append(server)
+ elif isinstance(server, STRING_TYPES):
+ self.servers.append(Server(server))
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'element must be a server in Server Pool <%s>', self)
+ raise LDAPServerPoolError('server in ServerPool must be a Server')
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'server must be a Server or a list of Servers when adding to Server Pool <%s>', self)
+ raise LDAPServerPoolError('server must be a Server or a list of Server')
+
+ if self.single:
+ if self._pool_state:
+ self._pool_state.refresh()
+ else:
+ for connection in self.pool_states:
+ # notifies connections using this pool to refresh
+ self.pool_states[connection].refresh()
+
+ def remove(self, server):
+ if server in self.servers:
+ self.servers.remove(server)
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'server %s to be removed not in Server Pool <%s>', server, self)
+ raise LDAPServerPoolError('server not in server pool')
+
+ if self.single:
+ if self._pool_state:
+ self._pool_state.refresh()
+ else:
+ for connection in self.pool_states:
+ # notifies connections using this pool to refresh
+ self.pool_states[connection].refresh()
+
+ def initialize(self, connection):
+ # registers pool_state in ServerPool object
+ if self.single:
+ if not self._pool_state:
+ self._pool_state = ServerPoolState(self)
+ self.pool_states[connection] = self._pool_state
+ else:
+ self.pool_states[connection] = ServerPoolState(self)
+
+ def get_server(self, connection):
+ if connection in self.pool_states:
+ return self.pool_states[connection].get_server()
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self)
+ raise LDAPServerPoolError('connection not in ServerPoolState')
+
+ def get_current_server(self, connection):
+ if connection in self.pool_states:
+ return self.pool_states[connection].get_current_server()
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'connection <%s> not in Server Pool State <%s>', connection, self)
+ raise LDAPServerPoolError('connection not in ServerPoolState')
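
The pooling rewrite above replaces the per-server [server, last_checked_time, available] lists with ServerState objects and adds a single_state flag to ServerPool: when it is left at its default of True, every Connection registered against the pool shares one ServerPoolState, so strategies such as ROUND_ROBIN rotate across connections rather than per connection. A short usage sketch, with placeholder host names and credentials:

    from ldap3 import Server, ServerPool, Connection, ROUND_ROBIN

    pool = ServerPool([Server('ldap1.example.com'), Server('ldap2.example.com')],
                      pool_strategy=ROUND_ROBIN,
                      active=True,        # keep looking for an available server
                      exhaust=60,         # keep an unreachable server out of rotation for 60 seconds
                      single_state=True)  # new in this diff: one shared ServerPoolState
    conn = Connection(pool, user='cn=admin,dc=example,dc=org', password='secret')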
diff --git a/ldap3/core/results.py b/ldap3/core/results.py
index 6f10643..14f8f73 100644
--- a/ldap3/core/results.py
+++ b/ldap3/core/results.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -134,4 +134,4 @@ RESULT_CODES = {
}
# result codes for which no exception is raised (in raise_exceptions connection mode)
-DO_NOT_RAISE_EXCEPTIONS = [RESULT_SUCCESS, RESULT_COMPARE_FALSE, RESULT_COMPARE_TRUE, RESULT_REFERRAL, RESULT_SASL_BIND_IN_PROGRESS]
+DO_NOT_RAISE_EXCEPTIONS = [RESULT_SUCCESS, RESULT_COMPARE_FALSE, RESULT_COMPARE_TRUE, RESULT_REFERRAL, RESULT_SASL_BIND_IN_PROGRESS, RESULT_SIZE_LIMIT_EXCEEDED, RESULT_TIME_LIMIT_EXCEEDED]
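
Adding RESULT_SIZE_LIMIT_EXCEEDED and RESULT_TIME_LIMIT_EXCEEDED to DO_NOT_RAISE_EXCEPTIONS changes behaviour for connections opened with raise_exceptions=True: a search truncated by a server-side size or time limit now completes and leaves the partial result available, instead of raising LDAPSizeLimitExceededResult or LDAPTimeLimitExceededResult. A sketch of the calling pattern, with placeholder host and credentials:

    from ldap3 import Server, Connection

    conn = Connection(Server('ldap.example.com'),
                      user='cn=admin,dc=example,dc=org', password='secret',
                      raise_exceptions=True, auto_bind=True)
    conn.search('dc=example,dc=org', '(objectClass=person)', attributes=['cn'])
    # after this change: no exception when the server answers sizeLimitExceeded;
    # the entries received so far are in conn.entries and conn.result['description']
    # reports the limit condition
    print(conn.result['description'], len(conn.entries))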
diff --git a/ldap3/core/server.py b/ldap3/core/server.py
index 811baf6..43189ef 100644
--- a/ldap3/core/server.py
+++ b/ldap3/core/server.py
@@ -1,572 +1,663 @@
-"""
-"""
-
-# Created on 2014.05.31
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-import socket
-from threading import Lock
-from datetime import datetime, MINYEAR
-
-from .. import DSA, SCHEMA, ALL, BASE, get_config_parameter, OFFLINE_EDIR_8_8_8, OFFLINE_AD_2012_R2, OFFLINE_SLAPD_2_4, OFFLINE_DS389_1_3_3, SEQUENCE_TYPES, IP_SYSTEM_DEFAULT, IP_V4_ONLY, IP_V6_ONLY, IP_V4_PREFERRED, IP_V6_PREFERRED, STRING_TYPES
-from .exceptions import LDAPInvalidServerError, LDAPDefinitionError, LDAPInvalidPortError, LDAPInvalidTlsSpecificationError, LDAPSocketOpenError
-from ..protocol.formatters.standard import format_attribute_values
-from ..protocol.rfc4511 import LDAP_MAX_INT
-from ..protocol.rfc4512 import SchemaInfo, DsaInfo
-from .tls import Tls
-from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL
-from ..utils.conv import to_unicode
-
-try:
- from urllib.parse import unquote # Python 3
-except ImportError:
- from urllib import unquote # Python 2
-
-try: # try to discover if unix sockets are available for LDAP over IPC (ldapi:// scheme)
- # noinspection PyUnresolvedReferences
- from socket import AF_UNIX
- unix_socket_available = True
-except ImportError:
- unix_socket_available = False
-
-
-class Server(object):
- """
- LDAP Server definition class
-
- Allowed_referral_hosts can be None (default), or a list of tuples of
- allowed servers ip address or names to contact while redirecting
- search to referrals.
-
- The second element of the tuple is a boolean to indicate if
- authentication to that server is allowed; if False only anonymous
- bind will be used.
-
- Per RFC 4516. Use [('*', False)] to allow any host with anonymous
- bind, use [('*', True)] to allow any host with same authentication of
- Server.
- """
-
- _message_counter = 0
- _message_id_lock = Lock() # global lock for message_id shared by all Server objects
-
-
- def __init__(self,
- host,
- port=None,
- use_ssl=False,
- allowed_referral_hosts=None,
- get_info=SCHEMA,
- tls=None,
- formatter=None,
- connect_timeout=None,
- mode=IP_V6_PREFERRED,
- validator=None):
-
- self.ipc = False
- url_given = False
- host = host.strip()
- if host.lower().startswith('ldap://'):
- self.host = host[7:]
- use_ssl = False
- url_given = True
- elif host.lower().startswith('ldaps://'):
- self.host = host[8:]
- use_ssl = True
- url_given = True
- elif host.lower().startswith('ldapi://') and unix_socket_available:
- self.ipc = True
- use_ssl = False
- url_given = True
- elif host.lower().startswith('ldapi://') and not unix_socket_available:
- raise LDAPSocketOpenError('LDAP over IPC not available - UNIX sockets non present')
- else:
- self.host = host
-
- if self.ipc:
- if str is bytes: # Python 2
- self.host = unquote(host[7:]).decode('utf-8')
- else: # Python 3
- self.host = unquote(host[7:]) # encoding defaults to utf-8 in python3
- self.port = None
- elif ':' in self.host and self.host.count(':') == 1:
- hostname, _, hostport = self.host.partition(':')
- try:
- port = int(hostport) or port
- except ValueError:
- if log_enabled(ERROR):
- log(ERROR, 'port <%s> must be an integer', port)
- raise LDAPInvalidPortError('port must be an integer')
- self.host = hostname
- elif url_given and self.host.startswith('['):
- hostname, sep, hostport = self.host[1:].partition(']')
- if sep != ']' or not self._is_ipv6(hostname):
- if log_enabled(ERROR):
- log(ERROR, 'invalid IPv6 server address for <%s>', self.host)
- raise LDAPInvalidServerError()
- if len(hostport):
- if not hostport.startswith(':'):
- if log_enabled(ERROR):
- log(ERROR, 'invalid URL in server name for <%s>', self.host)
- raise LDAPInvalidServerError('invalid URL in server name')
- if not hostport[1:].isdecimal():
- if log_enabled(ERROR):
- log(ERROR, 'port must be an integer for <%s>', self.host)
- raise LDAPInvalidPortError('port must be an integer')
- port = int(hostport[1:])
- self.host = hostname
- elif not url_given and self._is_ipv6(self.host):
- pass
- elif self.host.count(':') > 1:
- if log_enabled(ERROR):
- log(ERROR, 'invalid server address for <%s>', self.host)
- raise LDAPInvalidServerError()
-
- if not self.ipc:
- self.host.rstrip('/')
- if not use_ssl and not port:
- port = 389
- elif use_ssl and not port:
- port = 636
-
- if isinstance(port, int):
- if port in range(0, 65535):
- self.port = port
- else:
- if log_enabled(ERROR):
- log(ERROR, 'port <%s> must be in range from 0 to 65535', port)
- raise LDAPInvalidPortError('port must in range from 0 to 65535')
- else:
- if log_enabled(ERROR):
- log(ERROR, 'port <%s> must be an integer', port)
- raise LDAPInvalidPortError('port must be an integer')
-
- if allowed_referral_hosts is None: # defaults to any server with authentication
- allowed_referral_hosts = [('*', True)]
-
- if isinstance(allowed_referral_hosts, SEQUENCE_TYPES):
- self.allowed_referral_hosts = []
- for referral_host in allowed_referral_hosts:
- if isinstance(referral_host, tuple):
- if isinstance(referral_host[1], bool):
- self.allowed_referral_hosts.append(referral_host)
- elif isinstance(allowed_referral_hosts, tuple):
- if isinstance(allowed_referral_hosts[1], bool):
- self.allowed_referral_hosts = [allowed_referral_hosts]
- else:
- self.allowed_referral_hosts = []
-
- self.ssl = True if use_ssl else False
- if tls and not isinstance(tls, Tls):
- if log_enabled(ERROR):
- log(ERROR, 'invalid tls specification: <%s>', tls)
- raise LDAPInvalidTlsSpecificationError('invalid Tls object')
-
- self.tls = Tls() if self.ssl and not tls else tls
-
- if not self.ipc:
- if self._is_ipv6(self.host):
- self.name = ('ldaps' if self.ssl else 'ldap') + '://[' + self.host + ']:' + str(self.port)
- else:
- self.name = ('ldaps' if self.ssl else 'ldap') + '://' + self.host + ':' + str(self.port)
- else:
- self.name = host
-
- self.get_info = get_info
- self._dsa_info = None
- self._schema_info = None
- self.dit_lock = Lock()
- self.custom_formatter = formatter
- self.custom_validator = validator
- self._address_info = [] # property self.address_info resolved at open time (or when check_availability is called)
- self._address_info_resolved_time = datetime(MINYEAR, 1, 1) # smallest date ever
- self.current_address = None
- self.connect_timeout = connect_timeout
- self.mode = mode
-
- self.get_info_from_server(None) # load offline schema if needed
-
- if log_enabled(BASIC):
- log(BASIC, 'instantiated Server: <%r>', self)
-
- @staticmethod
- def _is_ipv6(host):
- try:
- socket.inet_pton(socket.AF_INET6, host)
- except (socket.error, AttributeError, ValueError):
- return False
- return True
-
- def __str__(self):
- if self.host:
- s = self.name + (' - ssl' if self.ssl else ' - cleartext') + (' - unix socket' if self.ipc else '')
- else:
- s = object.__str__(self)
- return s
-
- def __repr__(self):
- r = 'Server(host={0.host!r}, port={0.port!r}, use_ssl={0.ssl!r}'.format(self)
- r += '' if not self.allowed_referral_hosts else ', allowed_referral_hosts={0.allowed_referral_hosts!r}'.format(self)
- r += '' if self.tls is None else ', tls={0.tls!r}'.format(self)
- r += '' if not self.get_info else ', get_info={0.get_info!r}'.format(self)
- r += '' if not self.connect_timeout else ', connect_timeout={0.connect_timeout!r}'.format(self)
- r += '' if not self.mode else ', mode={0.mode!r}'.format(self)
- r += ')'
-
- return r
-
- @property
- def address_info(self):
- conf_refresh_interval = get_config_parameter('ADDRESS_INFO_REFRESH_TIME')
- if not self._address_info or (datetime.now() - self._address_info_resolved_time).seconds > conf_refresh_interval:
- # converts addresses tuple to list and adds a 6th parameter for availability (None = not checked, True = available, False=not available) and a 7th parameter for the checking time
- addresses = None
- try:
- if self.ipc:
- addresses = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, None, self.host, None)]
- else:
- addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
- except (socket.gaierror, AttributeError):
- pass
-
- if not addresses: # if addresses not found or raised an exception (for example for bad flags) tries again without flags
- try:
- addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP)
- except socket.gaierror:
- pass
-
- if addresses:
- self._address_info = [list(address) + [None, None] for address in addresses]
- self._address_info_resolved_time = datetime.now()
- else:
- self._address_info = []
- self._address_info_resolved_time = datetime(MINYEAR, 1, 1) # smallest date
-
- if log_enabled(BASIC):
- for address in self._address_info:
- log(BASIC, 'address for <%s> resolved as <%r>', self, address[:-2])
- return self._address_info
-
- def update_availability(self, address, available):
- cont = 0
- while cont < len(self._address_info):
- if self.address_info[cont] == address:
- self._address_info[cont][5] = True if available else False
- self._address_info[cont][6] = datetime.now()
- break
- cont += 1
-
- def reset_availability(self):
- for address in self._address_info:
- address[5] = None
- address[6] = None
-
- def check_availability(self):
- """
- Tries to open, connect and close a socket to specified address
- and port to check availability. Timeout in seconds is specified in CHECK_AVAILABITY_TIMEOUT if not specified in
- the Server object
- """
- conf_availability_timeout = get_config_parameter('CHECK_AVAILABILITY_TIMEOUT')
- available = False
- self.reset_availability()
- for address in self.candidate_addresses():
- available = True
- try:
- temp_socket = socket.socket(*address[:3])
- if self.connect_timeout:
- temp_socket.settimeout(self.connect_timeout)
- else:
- temp_socket.settimeout(conf_availability_timeout) # set timeout for checking availability to default
- try:
- temp_socket.connect(address[4])
- except socket.error:
- available = False
- finally:
- try:
- temp_socket.shutdown(socket.SHUT_RDWR)
- except socket.error:
- available = False
- finally:
- temp_socket.close()
- except socket.gaierror:
- available = False
-
- if available:
- if log_enabled(BASIC):
- log(BASIC, 'server <%s> available at <%r>', self, address)
- self.update_availability(address, True)
- break # if an available address is found exits immediately
- else:
- self.update_availability(address, False)
- if log_enabled(ERROR):
- log(ERROR, 'server <%s> not available at <%r>', self, address)
-
- return available
-
- @staticmethod
- def next_message_id():
- """
- LDAP messageId is unique for all connections to same server
- """
- with Server._message_id_lock:
- Server._message_counter += 1
- if Server._message_counter >= LDAP_MAX_INT:
- Server._message_counter = 1
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'new message id <%d> generated', Server._message_counter)
-
- return Server._message_counter
-
- def _get_dsa_info(self, connection):
- """
- Retrieve DSE operational attribute as per RFC4512 (5.1).
- """
- if connection.strategy.no_real_dsa: # do not try for mock strategies
- return
-
- if not connection.strategy.pooled: # in pooled strategies get_dsa_info is performed by the worker threads
- result = connection.search(search_base='',
- search_filter='(objectClass=*)',
- search_scope=BASE,
- attributes=['altServer', # requests specific dsa info attributes
- 'namingContexts',
- 'supportedControl',
- 'supportedExtension',
- 'supportedFeatures',
- 'supportedCapabilities',
- 'supportedLdapVersion',
- 'supportedSASLMechanisms',
- 'vendorName',
- 'vendorVersion',
- 'subschemaSubentry',
- '*',
- '+'], # requests all remaining attributes (other),
- get_operational_attributes=True)
-
- with self.dit_lock:
- if isinstance(result, bool): # sync request
- self._dsa_info = DsaInfo(connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else self._dsa_info
- elif result: # asynchronous request, must check if attributes in response
- results, _ = connection.get_response(result)
- if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]:
- self._dsa_info = DsaInfo(results[0]['attributes'], results[0]['raw_attributes'])
-
- if log_enabled(BASIC):
- log(BASIC, 'DSA info read for <%s> via <%s>', self, connection)
-
- def _get_schema_info(self, connection, entry=''):
- """
- Retrieve schema from subschemaSubentry DSE attribute, per RFC
- 4512 (4.4 and 5.1); entry = '' means DSE.
- """
- if connection.strategy.no_real_dsa: # do not try for mock strategies
- return
-
- schema_entry = None
- if self._dsa_info and entry == '': # subschemaSubentry already present in dsaInfo
- if isinstance(self._dsa_info.schema_entry, SEQUENCE_TYPES):
- schema_entry = self._dsa_info.schema_entry[0] if self._dsa_info.schema_entry else None
- else:
- schema_entry = self._dsa_info.schema_entry if self._dsa_info.schema_entry else None
- else:
- result = connection.search(entry, '(objectClass=*)', BASE, attributes=['subschemaSubentry'], get_operational_attributes=True)
- if isinstance(result, bool): # sync request
- if result and 'subschemaSubentry' in connection.response[0]['raw_attributes']:
- if len(connection.response[0]['raw_attributes']['subschemaSubentry']) > 0:
- schema_entry = connection.response[0]['raw_attributes']['subschemaSubentry'][0]
- else: # asynchronous request, must check if subschemaSubentry in attributes
- results, _ = connection.get_response(result)
- if len(results) == 1 and 'raw_attributes' in results[0] and 'subschemaSubentry' in results[0]['attributes']:
- if len(results[0]['raw_attributes']['subschemaSubentry']) > 0:
- schema_entry = results[0]['raw_attributes']['subschemaSubentry'][0]
-
- if schema_entry and not connection.strategy.pooled: # in pooled strategies get_schema_info is performed by the worker threads
- if isinstance(schema_entry, bytes) and str is not bytes: # Python 3
- schema_entry = to_unicode(schema_entry, from_server=True)
- result = connection.search(schema_entry,
- search_filter='(objectClass=subschema)',
- search_scope=BASE,
- attributes=['objectClasses', # requests specific subschema attributes
- 'attributeTypes',
- 'ldapSyntaxes',
- 'matchingRules',
- 'matchingRuleUse',
- 'dITContentRules',
- 'dITStructureRules',
- 'nameForms',
- 'createTimestamp',
- 'modifyTimestamp',
- '*'], # requests all remaining attributes (other)
- get_operational_attributes=True
- )
- with self.dit_lock:
- self._schema_info = None
- if result:
- if isinstance(result, bool): # sync request
- self._schema_info = SchemaInfo(schema_entry, connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else None
- else: # asynchronous request, must check if attributes in response
- results, result = connection.get_response(result)
- if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]:
- self._schema_info = SchemaInfo(schema_entry, results[0]['attributes'], results[0]['raw_attributes'])
- if self._schema_info and not self._schema_info.is_valid(): # flaky servers can return an empty schema, checks if it is so and set schema to None
- self._schema_info = None
- if self._schema_info: # if schema is valid tries to apply formatter to the "other" dict with raw values for schema and info
- for attribute in self._schema_info.other:
- self._schema_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._schema_info.raw[attribute], self.custom_formatter)
- if self._dsa_info: # try to apply formatter to the "other" dict with dsa info raw values
- for attribute in self._dsa_info.other:
- self._dsa_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._dsa_info.raw[attribute], self.custom_formatter)
- if log_enabled(BASIC):
- log(BASIC, 'schema read for <%s> via <%s>', self, connection)
-
- def get_info_from_server(self, connection):
- """
- reads info from DSE and from subschema
- """
- if connection and not connection.closed:
- if self.get_info in [DSA, ALL]:
- self._get_dsa_info(connection)
- if self.get_info in [SCHEMA, ALL]:
- self._get_schema_info(connection)
- elif self.get_info == OFFLINE_EDIR_8_8_8:
- from ..protocol.schemas.edir888 import edir_8_8_8_schema, edir_8_8_8_dsa_info
- self.attach_schema_info(SchemaInfo.from_json(edir_8_8_8_schema))
- self.attach_dsa_info(DsaInfo.from_json(edir_8_8_8_dsa_info))
- elif self.get_info == OFFLINE_AD_2012_R2:
- from ..protocol.schemas.ad2012R2 import ad_2012_r2_schema, ad_2012_r2_dsa_info
- self.attach_schema_info(SchemaInfo.from_json(ad_2012_r2_schema))
- self.attach_dsa_info(DsaInfo.from_json(ad_2012_r2_dsa_info))
- elif self.get_info == OFFLINE_SLAPD_2_4:
- from ..protocol.schemas.slapd24 import slapd_2_4_schema, slapd_2_4_dsa_info
- self.attach_schema_info(SchemaInfo.from_json(slapd_2_4_schema))
- self.attach_dsa_info(DsaInfo.from_json(slapd_2_4_dsa_info))
- elif self.get_info == OFFLINE_DS389_1_3_3:
- from ..protocol.schemas.ds389 import ds389_1_3_3_schema, ds389_1_3_3_dsa_info
- self.attach_schema_info(SchemaInfo.from_json(ds389_1_3_3_schema))
- self.attach_dsa_info(DsaInfo.from_json(ds389_1_3_3_dsa_info))
-
- def attach_dsa_info(self, dsa_info=None):
- if isinstance(dsa_info, DsaInfo):
- self._dsa_info = dsa_info
- if log_enabled(BASIC):
- log(BASIC, 'attached DSA info to Server <%s>', self)
-
- def attach_schema_info(self, dsa_schema=None):
- if isinstance(dsa_schema, SchemaInfo):
- self._schema_info = dsa_schema
- if log_enabled(BASIC):
- log(BASIC, 'attached schema info to Server <%s>', self)
-
- @property
- def info(self):
- return self._dsa_info
-
- @property
- def schema(self):
- return self._schema_info
-
- @staticmethod
- def from_definition(host, dsa_info, dsa_schema, port=None, use_ssl=False, formatter=None, validator=None):
- """
- Define a dummy server with preloaded schema and info
- :param host: host name
- :param dsa_info: DsaInfo preloaded object or a json formatted string or a file name
- :param dsa_schema: SchemaInfo preloaded object or a json formatted string or a file name
- :param port: dummy port
- :param use_ssl: use_ssl
- :param formatter: custom formatter
- :return: Server object
- """
- if isinstance(host, SEQUENCE_TYPES):
- dummy = Server(host=host[0], port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, tget_info=ALL) # for ServerPool object
- else:
- dummy = Server(host=host, port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL)
- if isinstance(dsa_info, DsaInfo):
- dummy._dsa_info = dsa_info
- elif isinstance(dsa_info, STRING_TYPES):
- try:
- dummy._dsa_info = DsaInfo.from_json(dsa_info) # tries to use dsa_info as a json configuration string
- except Exception:
- dummy._dsa_info = DsaInfo.from_file(dsa_info) # tries to use dsa_info as a file name
-
- if not dummy.info:
- if log_enabled(ERROR):
- log(ERROR, 'invalid DSA info for %s', host)
- raise LDAPDefinitionError('invalid dsa info')
-
- if isinstance(dsa_schema, SchemaInfo):
- dummy._schema_info = dsa_schema
- elif isinstance(dsa_schema, STRING_TYPES):
- try:
- dummy._schema_info = SchemaInfo.from_json(dsa_schema)
- except Exception:
- dummy._schema_info = SchemaInfo.from_file(dsa_schema)
-
- if not dummy.schema:
- if log_enabled(ERROR):
- log(ERROR, 'invalid schema info for %s', host)
- raise LDAPDefinitionError('invalid schema info')
-
- if log_enabled(BASIC):
- log(BASIC, 'created server <%s> from definition', dummy)
-
- return dummy
-
- def candidate_addresses(self):
- conf_reset_availability_timeout = get_config_parameter('RESET_AVAILABILITY_TIMEOUT')
- if self.ipc:
- candidates = self.address_info
- if log_enabled(BASIC):
- log(BASIC, 'candidate address for <%s>: <%s> with mode UNIX_SOCKET', self, self.name)
- else:
- # checks reset availability timeout
- for address in self.address_info:
- if address[6] and ((datetime.now() - address[6]).seconds > conf_reset_availability_timeout):
- address[5] = None
- address[6] = None
-
- # selects server address based on server mode and availability (in address[5])
- addresses = self.address_info[:] # copy to avoid refreshing while searching candidates
- candidates = []
- if addresses:
- if self.mode == IP_SYSTEM_DEFAULT:
- candidates.append(addresses[0])
- elif self.mode == IP_V4_ONLY:
- candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
- elif self.mode == IP_V6_ONLY:
- candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
- elif self.mode == IP_V4_PREFERRED:
- candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
- candidates += [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
- elif self.mode == IP_V6_PREFERRED:
- candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
- candidates += [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
- else:
- if log_enabled(ERROR):
- log(ERROR, 'invalid server mode for <%s>', self)
- raise LDAPInvalidServerError('invalid server mode')
-
- if log_enabled(BASIC):
- for candidate in candidates:
- log(BASIC, 'obtained candidate address for <%s>: <%r> with mode %s', self, candidate[:-2], self.mode)
- return candidates
+"""
+"""
+
+# Created on 2014.05.31
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+import socket
+from threading import Lock
+from datetime import datetime, MINYEAR
+
+from .. import DSA, SCHEMA, ALL, BASE, get_config_parameter, OFFLINE_EDIR_8_8_8, OFFLINE_EDIR_9_1_4, OFFLINE_AD_2012_R2, OFFLINE_SLAPD_2_4, OFFLINE_DS389_1_3_3, SEQUENCE_TYPES, IP_SYSTEM_DEFAULT, IP_V4_ONLY, IP_V6_ONLY, IP_V4_PREFERRED, IP_V6_PREFERRED, STRING_TYPES
+from .exceptions import LDAPInvalidServerError, LDAPDefinitionError, LDAPInvalidPortError, LDAPInvalidTlsSpecificationError, LDAPSocketOpenError, LDAPInfoError
+from ..protocol.formatters.standard import format_attribute_values
+from ..protocol.rfc4511 import LDAP_MAX_INT
+from ..protocol.rfc4512 import SchemaInfo, DsaInfo
+from .tls import Tls
+from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK
+from ..utils.conv import to_unicode
+from ..utils.port_validators import check_port, check_port_and_port_list
+
+try:
+ from urllib.parse import unquote # Python 3
+except ImportError:
+ from urllib import unquote # Python 2
+
+try: # try to discover if unix sockets are available for LDAP over IPC (ldapi:// scheme)
+ # noinspection PyUnresolvedReferences
+ from socket import AF_UNIX
+ unix_socket_available = True
+except ImportError:
+ unix_socket_available = False
+
+
+class Server(object):
+ """
+ LDAP Server definition class
+
+    allowed_referral_hosts can be None (default) or a list of tuples of
+    allowed server IP addresses or names to contact while redirecting a
+    search to referrals.
+
+    The second element of each tuple is a boolean indicating whether
+    authentication to that server is allowed; if False only anonymous
+    bind will be used.
+
+    Per RFC 4516, use [('*', False)] to allow any host with anonymous
+    bind and [('*', True)] to allow any host with the same authentication
+    as the Server.
+ """
+
+ _message_counter = 0
+ _message_id_lock = Lock() # global lock for message_id shared by all Server objects
+
+ def __init__(self,
+ host,
+ port=None,
+ use_ssl=False,
+ allowed_referral_hosts=None,
+ get_info=SCHEMA,
+ tls=None,
+ formatter=None,
+ connect_timeout=None,
+ mode=IP_V6_PREFERRED,
+ validator=None):
+
+ self.ipc = False
+ url_given = False
+ host = host.strip()
+ if host.lower().startswith('ldap://'):
+ self.host = host[7:]
+ use_ssl = False
+ url_given = True
+ elif host.lower().startswith('ldaps://'):
+ self.host = host[8:]
+ use_ssl = True
+ url_given = True
+ elif host.lower().startswith('ldapi://') and unix_socket_available:
+ self.ipc = True
+ use_ssl = False
+ url_given = True
+ elif host.lower().startswith('ldapi://') and not unix_socket_available:
+        raise LDAPSocketOpenError('LDAP over IPC not available - UNIX sockets not present')
+ else:
+ self.host = host
+
+ if self.ipc:
+ if str is bytes: # Python 2
+ self.host = unquote(host[7:]).decode('utf-8')
+ else: # Python 3
+ self.host = unquote(host[7:]) # encoding defaults to utf-8 in python3
+ self.port = None
+ elif ':' in self.host and self.host.count(':') == 1:
+ hostname, _, hostport = self.host.partition(':')
+ try:
+ port = int(hostport) or port
+ except ValueError:
+ if log_enabled(ERROR):
+ log(ERROR, 'port <%s> must be an integer', port)
+ raise LDAPInvalidPortError('port must be an integer')
+ self.host = hostname
+ elif url_given and self.host.startswith('['):
+ hostname, sep, hostport = self.host[1:].partition(']')
+ if sep != ']' or not self._is_ipv6(hostname):
+ if log_enabled(ERROR):
+ log(ERROR, 'invalid IPv6 server address for <%s>', self.host)
+ raise LDAPInvalidServerError()
+ if len(hostport):
+ if not hostport.startswith(':'):
+ if log_enabled(ERROR):
+ log(ERROR, 'invalid URL in server name for <%s>', self.host)
+ raise LDAPInvalidServerError('invalid URL in server name')
+ if not hostport[1:].isdecimal():
+ if log_enabled(ERROR):
+ log(ERROR, 'port must be an integer for <%s>', self.host)
+ raise LDAPInvalidPortError('port must be an integer')
+ port = int(hostport[1:])
+ self.host = hostname
+ elif not url_given and self._is_ipv6(self.host):
+ pass
+ elif self.host.count(':') > 1:
+ if log_enabled(ERROR):
+ log(ERROR, 'invalid server address for <%s>', self.host)
+ raise LDAPInvalidServerError()
+
+ if not self.ipc:
+            self.host = self.host.rstrip('/')  # remove any trailing slash left over from the URL
+ if not use_ssl and not port:
+ port = 389
+ elif use_ssl and not port:
+ port = 636
+
+ port_err = check_port(port)
+ if port_err:
+ if log_enabled(ERROR):
+ log(ERROR, port_err)
+ raise LDAPInvalidPortError(port_err)
+ self.port = port
+
+ if allowed_referral_hosts is None: # defaults to any server with authentication
+ allowed_referral_hosts = [('*', True)]
+
+ if isinstance(allowed_referral_hosts, SEQUENCE_TYPES):
+ self.allowed_referral_hosts = []
+ for referral_host in allowed_referral_hosts:
+ if isinstance(referral_host, tuple):
+ if isinstance(referral_host[1], bool):
+ self.allowed_referral_hosts.append(referral_host)
+ elif isinstance(allowed_referral_hosts, tuple):
+ if isinstance(allowed_referral_hosts[1], bool):
+ self.allowed_referral_hosts = [allowed_referral_hosts]
+ else:
+ self.allowed_referral_hosts = []
+
+ self.ssl = True if use_ssl else False
+ if tls and not isinstance(tls, Tls):
+ if log_enabled(ERROR):
+ log(ERROR, 'invalid tls specification: <%s>', tls)
+ raise LDAPInvalidTlsSpecificationError('invalid Tls object')
+
+ self.tls = Tls() if self.ssl and not tls else tls
+
+ if not self.ipc:
+ if self._is_ipv6(self.host):
+ self.name = ('ldaps' if self.ssl else 'ldap') + '://[' + self.host + ']:' + str(self.port)
+ else:
+ self.name = ('ldaps' if self.ssl else 'ldap') + '://' + self.host + ':' + str(self.port)
+ else:
+ self.name = host
+
+ self.get_info = get_info
+ self._dsa_info = None
+ self._schema_info = None
+ self.dit_lock = Lock()
+ self.custom_formatter = formatter
+ self.custom_validator = validator
+ self._address_info = [] # property self.address_info resolved at open time (or when check_availability is called)
+ self._address_info_resolved_time = datetime(MINYEAR, 1, 1) # smallest date ever
+ self.current_address = None
+ self.connect_timeout = connect_timeout
+ self.mode = mode
+
+ self.get_info_from_server(None) # load offline schema if needed
+
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated Server: <%r>', self)
+
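# Illustrative sketch of how the host parsing above behaves; host names are
# placeholders and the ldap3 package is assumed to be importable.
from ldap3 import Server

plain = Server('ldap.example.com')                   # no scheme: cleartext, default port 389
from_url = Server('ldaps://ldap.example.com:10636')  # scheme sets use_ssl, the URL port wins over the default 636
ipv6 = Server('ldap://[2001:db8::10]')               # bracketed IPv6 literal, default port 389
print(plain.port, from_url.ssl, ipv6.name)           # 389 True ldap://[2001:db8::10]:389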
+ @staticmethod
+ def _is_ipv6(host):
+ try:
+ socket.inet_pton(socket.AF_INET6, host)
+ except (socket.error, AttributeError, ValueError):
+ return False
+ return True
+
+ def __str__(self):
+ if self.host:
+ s = self.name + (' - ssl' if self.ssl else ' - cleartext') + (' - unix socket' if self.ipc else '')
+ else:
+ s = object.__str__(self)
+ return s
+
+ def __repr__(self):
+ r = 'Server(host={0.host!r}, port={0.port!r}, use_ssl={0.ssl!r}'.format(self)
+ r += '' if not self.allowed_referral_hosts else ', allowed_referral_hosts={0.allowed_referral_hosts!r}'.format(self)
+ r += '' if self.tls is None else ', tls={0.tls!r}'.format(self)
+ r += '' if not self.get_info else ', get_info={0.get_info!r}'.format(self)
+ r += '' if not self.connect_timeout else ', connect_timeout={0.connect_timeout!r}'.format(self)
+ r += '' if not self.mode else ', mode={0.mode!r}'.format(self)
+ r += ')'
+
+ return r
+
+ @property
+ def address_info(self):
+ conf_refresh_interval = get_config_parameter('ADDRESS_INFO_REFRESH_TIME')
+ if not self._address_info or (datetime.now() - self._address_info_resolved_time).seconds > conf_refresh_interval:
+ # converts addresses tuple to list and adds a 6th parameter for availability (None = not checked, True = available, False=not available) and a 7th parameter for the checking time
+ addresses = None
+ try:
+ if self.ipc:
+ addresses = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, None, self.host, None)]
+ else:
+ if self.mode == IP_V4_ONLY:
+ addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
+ elif self.mode == IP_V6_ONLY:
+ addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
+ else:
+ addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP, socket.AI_ADDRCONFIG | socket.AI_V4MAPPED)
+ except (socket.gaierror, AttributeError):
+ pass
+
+ if not addresses: # if addresses not found or raised an exception (for example for bad flags) tries again without flags
+ try:
+ if self.mode == IP_V4_ONLY:
+ addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
+ elif self.mode == IP_V6_ONLY:
+ addresses = socket.getaddrinfo(self.host, self.port, socket.AF_INET6, socket.SOCK_STREAM, socket.IPPROTO_TCP)
+ else:
+ addresses = socket.getaddrinfo(self.host, self.port, socket.AF_UNSPEC, socket.SOCK_STREAM, socket.IPPROTO_TCP)
+ except socket.gaierror:
+ pass
+
+ if addresses:
+ self._address_info = [list(address) + [None, None] for address in addresses]
+ self._address_info_resolved_time = datetime.now()
+ else:
+ self._address_info = []
+ self._address_info_resolved_time = datetime(MINYEAR, 1, 1) # smallest date
+
+ if log_enabled(BASIC):
+ for address in self._address_info:
+ log(BASIC, 'address for <%s> resolved as <%r>', self, address[:-2])
+ return self._address_info
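# Sketch of the structure built above (placeholder host, which must resolve in DNS for the
# list to be non-empty): each getaddrinfo 5-tuple becomes a list with two extra slots,
# index 5 for availability (None = not checked, True/False after a check) and index 6
# for the time of that check.
srv = Server('ldap.example.com')
for family, socktype, proto, canonname, sockaddr, available, checked in srv.address_info:
    print(family, sockaddr, available, checked)   # e.g. AddressFamily.AF_INET ('192.0.2.10', 389) None None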
+
+ def update_availability(self, address, available):
+ cont = 0
+ while cont < len(self._address_info):
+ if self.address_info[cont] == address:
+ self._address_info[cont][5] = True if available else False
+ self._address_info[cont][6] = datetime.now()
+ break
+ cont += 1
+
+ def reset_availability(self):
+ for address in self._address_info:
+ address[5] = None
+ address[6] = None
+
+ def check_availability(self, source_address=None, source_port=None, source_port_list=None):
+ """
+        Tries to open, connect and close a socket to the specified address and port to check availability.
+        The timeout in seconds is taken from the CHECK_AVAILABILITY_TIMEOUT configuration parameter if not
+        specified in the Server object.
+        If given, a specific source address, source port, or list of candidate source ports is used when
+        checking availability.
+        NOTE: multiple ports from the source port list are only tried when the first ones fail to bind because
+        they are already in use. Different ports are not retried when the server itself is unavailable, as that
+        could make the runtime of check_availability significantly exceed the connection timeout.
+ """
+ source_port_err = check_port_and_port_list(source_port, source_port_list)
+ if source_port_err:
+ if log_enabled(ERROR):
+ log(ERROR, source_port_err)
+ raise LDAPInvalidPortError(source_port_err)
+
+        # an empty string as the bind address means "use the default, as if no address was provided";
+        # socket binding requires an ip value whenever a specific port is passed
+        bind_address = source_address if source_address is not None else ''
+        # 0 as the bind port means "let the OS pick a random port, as if no port was provided";
+        # socket binding requires a port value whenever a specific ip is passed
+ candidate_bind_ports = [0]
+
+ # if we have either a source port or source port list, convert that into our candidate list
+ if source_port is not None:
+ candidate_bind_ports = [source_port]
+ elif source_port_list is not None:
+ candidate_bind_ports = source_port_list[:]
+
+ conf_availability_timeout = get_config_parameter('CHECK_AVAILABILITY_TIMEOUT')
+ available = False
+ self.reset_availability()
+ for address in self.candidate_addresses():
+ available = True
+ try:
+ temp_socket = socket.socket(*address[:3])
+
+                # Go through the candidate bind ports and try to bind the socket to the source address with each one.
+                # If no source address or ports were specified, this has the same success/fail outcome as connecting
+                # to the remote server without binding locally first.
+                # It is actually slightly better, because it distinguishes "issue binding the socket locally" from
+                # "remote server is unavailable" more clearly; with no source address/port specified, a local bind
+                # failure should only occur when availability is being checked by a very unprivileged user.
+ last_bind_exc = None
+ socket_bind_succeeded = False
+ for bind_port in candidate_bind_ports:
+ try:
+ temp_socket.bind((bind_address, bind_port))
+ socket_bind_succeeded = True
+ break
+ except Exception as bind_ex:
+ last_bind_exc = bind_ex
+ if log_enabled(NETWORK):
+ log(NETWORK, 'Unable to bind to local address <%s> with source port <%s> due to <%s>',
+ bind_address, bind_port, bind_ex)
+ if not socket_bind_succeeded:
+ if log_enabled(ERROR):
+                        log(ERROR, 'Unable to bind to local address <%s> with any of the source ports <%s> due to <%s>',
+ bind_address, candidate_bind_ports, last_bind_exc)
+ raise LDAPSocketOpenError('Unable to bind socket locally to address {} with any of the source ports {} due to {}'
+ .format(bind_address, candidate_bind_ports, last_bind_exc))
+
+ if self.connect_timeout:
+ temp_socket.settimeout(self.connect_timeout)
+ else:
+ temp_socket.settimeout(conf_availability_timeout) # set timeout for checking availability to default
+ try:
+ temp_socket.connect(address[4])
+ except socket.error:
+ available = False
+ finally:
+ try:
+ temp_socket.shutdown(socket.SHUT_RDWR)
+ except socket.error:
+ available = False
+ finally:
+ temp_socket.close()
+ except socket.gaierror:
+ available = False
+
+ if available:
+ if log_enabled(BASIC):
+ log(BASIC, 'server <%s> available at <%r>', self, address)
+ self.update_availability(address, True)
+ break # if an available address is found exits immediately
+ else:
+ self.update_availability(address, False)
+ if log_enabled(ERROR):
+ log(ERROR, 'server <%s> not available at <%r>', self, address)
+
+ return available
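# Minimal usage sketch with placeholder host and source ports: probe reachability while
# pinning the local end of the probe socket to one of a few allowed egress ports
# (a source_address could be passed as well).
srv = Server('ldap.example.com', connect_timeout=5)
if srv.check_availability(source_port_list=[35001, 35002, 35003]):
    for addr in srv.address_info:
        print(addr[4], addr[5])   # sockaddr and the availability flag recorded by the check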
+
+ @staticmethod
+ def next_message_id():
+ """
+ LDAP messageId is unique for all connections to same server
+ """
+ with Server._message_id_lock:
+ Server._message_counter += 1
+ if Server._message_counter >= LDAP_MAX_INT:
+ Server._message_counter = 1
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'new message id <%d> generated', Server._message_counter)
+
+ return Server._message_counter
+
+ def _get_dsa_info(self, connection):
+ """
+ Retrieve DSE operational attribute as per RFC4512 (5.1).
+ """
+ if connection.strategy.no_real_dsa: # do not try for mock strategies
+ return
+
+ if not connection.strategy.pooled: # in pooled strategies get_dsa_info is performed by the worker threads
+ result = connection.search(search_base='',
+ search_filter='(objectClass=*)',
+ search_scope=BASE,
+ attributes=['altServer', # requests specific dsa info attributes
+ 'namingContexts',
+ 'supportedControl',
+ 'supportedExtension',
+ 'supportedFeatures',
+ 'supportedCapabilities',
+ 'supportedLdapVersion',
+ 'supportedSASLMechanisms',
+ 'vendorName',
+ 'vendorVersion',
+ 'subschemaSubentry',
+ '*',
+ '+'], # requests all remaining attributes (other),
+ get_operational_attributes=True)
+
+ with self.dit_lock:
+ if isinstance(result, bool): # sync request
+ self._dsa_info = DsaInfo(connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else self._dsa_info
+ elif result: # asynchronous request, must check if attributes in response
+ results, _ = connection.get_response(result)
+ if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]:
+ self._dsa_info = DsaInfo(results[0]['attributes'], results[0]['raw_attributes'])
+
+ if log_enabled(BASIC):
+ log(BASIC, 'DSA info read for <%s> via <%s>', self, connection)
+
+ def _get_schema_info(self, connection, entry=''):
+ """
+ Retrieve schema from subschemaSubentry DSE attribute, per RFC
+ 4512 (4.4 and 5.1); entry = '' means DSE.
+ """
+ if connection.strategy.no_real_dsa: # do not try for mock strategies
+ return
+
+ schema_entry = None
+ if self._dsa_info and entry == '': # subschemaSubentry already present in dsaInfo
+ if isinstance(self._dsa_info.schema_entry, SEQUENCE_TYPES):
+ schema_entry = self._dsa_info.schema_entry[0] if self._dsa_info.schema_entry else None
+ else:
+ schema_entry = self._dsa_info.schema_entry if self._dsa_info.schema_entry else None
+ else:
+ result = connection.search(entry, '(objectClass=*)', BASE, attributes=['subschemaSubentry'], get_operational_attributes=True)
+ if isinstance(result, bool): # sync request
+ if result and 'subschemaSubentry' in connection.response[0]['raw_attributes']:
+ if len(connection.response[0]['raw_attributes']['subschemaSubentry']) > 0:
+ schema_entry = connection.response[0]['raw_attributes']['subschemaSubentry'][0]
+ else: # asynchronous request, must check if subschemaSubentry in attributes
+ results, _ = connection.get_response(result)
+ if len(results) == 1 and 'raw_attributes' in results[0] and 'subschemaSubentry' in results[0]['attributes']:
+ if len(results[0]['raw_attributes']['subschemaSubentry']) > 0:
+ schema_entry = results[0]['raw_attributes']['subschemaSubentry'][0]
+
+ if schema_entry and not connection.strategy.pooled: # in pooled strategies get_schema_info is performed by the worker threads
+ if isinstance(schema_entry, bytes) and str is not bytes: # Python 3
+ schema_entry = to_unicode(schema_entry, from_server=True)
+ result = connection.search(schema_entry,
+ search_filter='(objectClass=subschema)',
+ search_scope=BASE,
+ attributes=['objectClasses', # requests specific subschema attributes
+ 'attributeTypes',
+ 'ldapSyntaxes',
+ 'matchingRules',
+ 'matchingRuleUse',
+ 'dITContentRules',
+ 'dITStructureRules',
+ 'nameForms',
+ 'createTimestamp',
+ 'modifyTimestamp',
+ '*'], # requests all remaining attributes (other)
+ get_operational_attributes=True
+ )
+ with self.dit_lock:
+ self._schema_info = None
+ if result:
+ if isinstance(result, bool): # sync request
+ self._schema_info = SchemaInfo(schema_entry, connection.response[0]['attributes'], connection.response[0]['raw_attributes']) if result else None
+ else: # asynchronous request, must check if attributes in response
+ results, result = connection.get_response(result)
+ if len(results) == 1 and 'attributes' in results[0] and 'raw_attributes' in results[0]:
+ self._schema_info = SchemaInfo(schema_entry, results[0]['attributes'], results[0]['raw_attributes'])
+ if self._schema_info and not self._schema_info.is_valid(): # flaky servers can return an empty schema, checks if it is so and set schema to None
+ self._schema_info = None
+ if self._schema_info: # if schema is valid tries to apply formatter to the "other" dict with raw values for schema and info
+ for attribute in self._schema_info.other:
+ self._schema_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._schema_info.raw[attribute], self.custom_formatter)
+ if self._dsa_info: # try to apply formatter to the "other" dict with dsa info raw values
+ for attribute in self._dsa_info.other:
+ self._dsa_info.other[attribute] = format_attribute_values(self._schema_info, attribute, self._dsa_info.raw[attribute], self.custom_formatter)
+ if log_enabled(BASIC):
+ log(BASIC, 'schema read for <%s> via <%s>', self, connection)
+
+ def get_info_from_server(self, connection):
+ """
+ reads info from DSE and from subschema
+ """
+ if connection and not connection.closed:
+ if self.get_info in [DSA, ALL]:
+ self._get_dsa_info(connection)
+ if self.get_info in [SCHEMA, ALL]:
+ self._get_schema_info(connection)
+ elif self.get_info == OFFLINE_EDIR_8_8_8:
+ from ..protocol.schemas.edir888 import edir_8_8_8_schema, edir_8_8_8_dsa_info
+ self.attach_schema_info(SchemaInfo.from_json(edir_8_8_8_schema))
+ self.attach_dsa_info(DsaInfo.from_json(edir_8_8_8_dsa_info))
+ elif self.get_info == OFFLINE_EDIR_9_1_4:
+ from ..protocol.schemas.edir914 import edir_9_1_4_schema, edir_9_1_4_dsa_info
+ self.attach_schema_info(SchemaInfo.from_json(edir_9_1_4_schema))
+ self.attach_dsa_info(DsaInfo.from_json(edir_9_1_4_dsa_info))
+ elif self.get_info == OFFLINE_AD_2012_R2:
+ from ..protocol.schemas.ad2012R2 import ad_2012_r2_schema, ad_2012_r2_dsa_info
+ self.attach_schema_info(SchemaInfo.from_json(ad_2012_r2_schema))
+ self.attach_dsa_info(DsaInfo.from_json(ad_2012_r2_dsa_info))
+ elif self.get_info == OFFLINE_SLAPD_2_4:
+ from ..protocol.schemas.slapd24 import slapd_2_4_schema, slapd_2_4_dsa_info
+ self.attach_schema_info(SchemaInfo.from_json(slapd_2_4_schema))
+ self.attach_dsa_info(DsaInfo.from_json(slapd_2_4_dsa_info))
+ elif self.get_info == OFFLINE_DS389_1_3_3:
+ from ..protocol.schemas.ds389 import ds389_1_3_3_schema, ds389_1_3_3_dsa_info
+ self.attach_schema_info(SchemaInfo.from_json(ds389_1_3_3_schema))
+ self.attach_dsa_info(DsaInfo.from_json(ds389_1_3_3_dsa_info))
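# Sketch of the OFFLINE_* branches above: the bundled JSON definitions are attached at
# construction time, so schema-aware behaviour works without contacting any server
# ('person' is simply an object class expected in the OpenLDAP core schema).
from ldap3 import Server, OFFLINE_SLAPD_2_4
offline = Server('fake.example.com', get_info=OFFLINE_SLAPD_2_4)
print(offline.schema.object_classes['person'])   # resolved from slapd24.py, no bind needed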
+
+ def attach_dsa_info(self, dsa_info=None):
+ if isinstance(dsa_info, DsaInfo):
+ self._dsa_info = dsa_info
+ if log_enabled(BASIC):
+ log(BASIC, 'attached DSA info to Server <%s>', self)
+
+ def attach_schema_info(self, dsa_schema=None):
+ if isinstance(dsa_schema, SchemaInfo):
+ self._schema_info = dsa_schema
+ if log_enabled(BASIC):
+ log(BASIC, 'attached schema info to Server <%s>', self)
+
+ @property
+ def info(self):
+ return self._dsa_info
+
+ @property
+ def schema(self):
+ return self._schema_info
+
+ @staticmethod
+ def from_definition(host, dsa_info, dsa_schema, port=None, use_ssl=False, formatter=None, validator=None):
+ """
+ Define a dummy server with preloaded schema and info
+ :param host: host name
+ :param dsa_info: DsaInfo preloaded object or a json formatted string or a file name
+ :param dsa_schema: SchemaInfo preloaded object or a json formatted string or a file name
+        :param port: fake port
+        :param use_ssl: use_ssl
+        :param formatter: custom formatter
+        :param validator: custom validator
+        :return: Server object
+ """
+ if isinstance(host, SEQUENCE_TYPES):
+ dummy = Server(host=host[0], port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL) # for ServerPool object
+ else:
+ dummy = Server(host=host, port=port, use_ssl=use_ssl, formatter=formatter, validator=validator, get_info=ALL)
+ if isinstance(dsa_info, DsaInfo):
+ dummy._dsa_info = dsa_info
+ elif isinstance(dsa_info, STRING_TYPES):
+ try:
+ dummy._dsa_info = DsaInfo.from_json(dsa_info) # tries to use dsa_info as a json configuration string
+ except Exception:
+ dummy._dsa_info = DsaInfo.from_file(dsa_info) # tries to use dsa_info as a file name
+
+ if not dummy.info:
+ if log_enabled(ERROR):
+ log(ERROR, 'invalid DSA info for %s', host)
+ raise LDAPDefinitionError('invalid dsa info')
+
+ if isinstance(dsa_schema, SchemaInfo):
+ dummy._schema_info = dsa_schema
+ elif isinstance(dsa_schema, STRING_TYPES):
+ try:
+ dummy._schema_info = SchemaInfo.from_json(dsa_schema)
+ except Exception:
+ dummy._schema_info = SchemaInfo.from_file(dsa_schema)
+
+ if not dummy.schema:
+ if log_enabled(ERROR):
+ log(ERROR, 'invalid schema info for %s', host)
+ raise LDAPDefinitionError('invalid schema info')
+
+ if log_enabled(BASIC):
+ log(BASIC, 'created server <%s> from definition', dummy)
+
+ return dummy
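# Sketch of the intended use of from_definition: a schema-aware fake server for offline
# tests with the mock strategies. The JSON file names are placeholders, e.g. produced
# earlier with server.info.to_file() and server.schema.to_file() against a real server.
from ldap3 import Server, Connection, MOCK_SYNC
fake = Server.from_definition('my_fake_server', 'server_info.json', 'server_schema.json')
conn = Connection(fake, client_strategy=MOCK_SYNC)
conn.bind()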
+
+ def candidate_addresses(self):
+ conf_reset_availability_timeout = get_config_parameter('RESET_AVAILABILITY_TIMEOUT')
+ if self.ipc:
+ candidates = self.address_info
+ if log_enabled(BASIC):
+ log(BASIC, 'candidate address for <%s>: <%s> with mode UNIX_SOCKET', self, self.name)
+ else:
+ # checks reset availability timeout
+ for address in self.address_info:
+ if address[6] and ((datetime.now() - address[6]).seconds > conf_reset_availability_timeout):
+ address[5] = None
+ address[6] = None
+
+ # selects server address based on server mode and availability (in address[5])
+ addresses = self.address_info[:] # copy to avoid refreshing while searching candidates
+ candidates = []
+ if addresses:
+ if self.mode == IP_SYSTEM_DEFAULT:
+ candidates.append(addresses[0])
+ elif self.mode == IP_V4_ONLY:
+ candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
+ elif self.mode == IP_V6_ONLY:
+ candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
+ elif self.mode == IP_V4_PREFERRED:
+ candidates = [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
+ candidates += [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
+ elif self.mode == IP_V6_PREFERRED:
+ candidates = [address for address in addresses if address[0] == socket.AF_INET6 and (address[5] or address[5] is None)]
+ candidates += [address for address in addresses if address[0] == socket.AF_INET and (address[5] or address[5] is None)]
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'invalid server mode for <%s>', self)
+ raise LDAPInvalidServerError('invalid server mode')
+
+ if log_enabled(BASIC):
+ for candidate in candidates:
+ log(BASIC, 'obtained candidate address for <%s>: <%r> with mode %s', self, candidate[:-2], self.mode)
+ return candidates
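# Sketch of the ordering applied above (placeholder dual-stack host): with IP_V4_PREFERRED
# the resolved A records come first, followed by the AAAA records.
from ldap3 import Server, IP_V4_PREFERRED
dual = Server('dual.example.com', mode=IP_V4_PREFERRED)
print([candidate[4] for candidate in dual.candidate_addresses()])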
+
+ def _check_info_property(self, kind, name):
+ if not self._dsa_info:
+ raise LDAPInfoError('server info not loaded')
+
+ if kind == 'control':
+ properties = self.info.supported_controls
+ elif kind == 'extension':
+ properties = self.info.supported_extensions
+ elif kind == 'feature':
+ properties = self.info.supported_features
+ else:
+ raise LDAPInfoError('invalid info category')
+
+ for prop in properties:
+ if name == prop[0] or (prop[2] and name.lower() == prop[2].lower()): # checks oid and description
+ return True
+
+ return False
+
+ def has_control(self, control):
+ return self._check_info_property('control', control)
+
+ def has_extension(self, extension):
+ return self._check_info_property('extension', extension)
+
+ def has_feature(self, feature):
+ return self._check_info_property('feature', feature)
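# Sketch of the new helpers; they need DSA info, so a placeholder server is read with
# get_info=ALL through a bound connection first. The OIDs are the standard ones for the
# Simple Paged Results control and the Password Modify extended operation (RFC 3062).
from ldap3 import Server, Connection, ALL
srv = Server('ldap.example.com', get_info=ALL)
conn = Connection(srv, auto_bind=True)               # reading the root DSE populates srv.info
if srv.has_control('1.2.840.113556.1.4.319'):
    print('paged searches supported')
if srv.has_extension('1.3.6.1.4.1.4203.1.11.1'):
    print('password modify supported')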
+
+
+
diff --git a/ldap3/core/timezone.py b/ldap3/core/timezone.py
index 728f73b..0c24a77 100644
--- a/ldap3/core/timezone.py
+++ b/ldap3/core/timezone.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/core/tls.py b/ldap3/core/tls.py
index aa52f9e..1539b9f 100644
--- a/ldap3/core/tls.py
+++ b/ldap3/core/tls.py
@@ -1,326 +1,327 @@
-"""
-"""
-
-# Created on 2013.08.05
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2013 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from .exceptions import LDAPSSLNotSupportedError, LDAPSSLConfigurationError, LDAPStartTLSError, LDAPCertificateError, start_tls_exception_factory
-from .. import SEQUENCE_TYPES
-from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK
-
-try:
- # noinspection PyUnresolvedReferences
- import ssl
-except ImportError:
- if log_enabled(ERROR):
- log(ERROR, 'SSL not supported in this Python interpreter')
- raise LDAPSSLNotSupportedError('SSL not supported in this Python interpreter')
-
-try:
- from ssl import match_hostname, CertificateError # backport for python2 missing ssl functionalities
-except ImportError:
- from ..utils.tls_backport import CertificateError
- from ..utils.tls_backport import match_hostname
- if log_enabled(BASIC):
- log(BASIC, 'using tls_backport')
-
-try: # try to use SSLContext
- # noinspection PyUnresolvedReferences
- from ssl import create_default_context, Purpose # defined in Python 3.4 and Python 2.7.9
- use_ssl_context = True
-except ImportError:
- use_ssl_context = False
- if log_enabled(BASIC):
- log(BASIC, 'SSLContext unavailable')
-
-from os import path
-
-
-# noinspection PyProtectedMember
-class Tls(object):
- """
- tls/ssl configuration for Server object
- Starting from python 2.7.9 and python 3.4 uses the SSLContext object
- that tries to read the CAs defined at system level
- ca_certs_path and ca_certs_data are valid only when using SSLContext
- local_private_key_password is valid only when using SSLContext
- sni is the server name for Server Name Indication (when available)
- """
-
- def __init__(self,
- local_private_key_file=None,
- local_certificate_file=None,
- validate=ssl.CERT_NONE,
- version=None,
- ca_certs_file=None,
- valid_names=None,
- ca_certs_path=None,
- ca_certs_data=None,
- local_private_key_password=None,
- ciphers=None,
- sni=None):
-
- if validate in [ssl.CERT_NONE, ssl.CERT_OPTIONAL, ssl.CERT_REQUIRED]:
- self.validate = validate
- elif validate:
- if log_enabled(ERROR):
- log(ERROR, 'invalid validate parameter <%s>', validate)
- raise LDAPSSLConfigurationError('invalid validate parameter')
- if ca_certs_file and path.exists(ca_certs_file):
- self.ca_certs_file = ca_certs_file
- elif ca_certs_file:
- if log_enabled(ERROR):
- log(ERROR, 'invalid CA public key file <%s>', ca_certs_file)
- raise LDAPSSLConfigurationError('invalid CA public key file')
- else:
- self.ca_certs_file = None
-
- if ca_certs_path and use_ssl_context and path.exists(ca_certs_path):
- self.ca_certs_path = ca_certs_path
- elif ca_certs_path and not use_ssl_context:
- if log_enabled(ERROR):
- log(ERROR, 'cannot use CA public keys path, SSLContext not available')
- raise LDAPSSLNotSupportedError('cannot use CA public keys path, SSLContext not available')
- elif ca_certs_path:
- if log_enabled(ERROR):
- log(ERROR, 'invalid CA public keys path <%s>', ca_certs_path)
- raise LDAPSSLConfigurationError('invalid CA public keys path')
- else:
- self.ca_certs_path = None
-
- if ca_certs_data and use_ssl_context:
- self.ca_certs_data = ca_certs_data
- elif ca_certs_data:
- if log_enabled(ERROR):
- log(ERROR, 'cannot use CA data, SSLContext not available')
- raise LDAPSSLNotSupportedError('cannot use CA data, SSLContext not available')
- else:
- self.ca_certs_data = None
-
- if local_private_key_password and use_ssl_context:
- self.private_key_password = local_private_key_password
- elif local_private_key_password:
- if log_enabled(ERROR):
- log(ERROR, 'cannot use local private key password, SSLContext not available')
- raise LDAPSSLNotSupportedError('cannot use local private key password, SSLContext is not available')
- else:
- self.private_key_password = None
-
- self.version = version
- self.private_key_file = local_private_key_file
- self.certificate_file = local_certificate_file
- self.valid_names = valid_names
- self.ciphers = ciphers
- self.sni = sni
-
- if log_enabled(BASIC):
- log(BASIC, 'instantiated Tls: <%r>' % self)
-
- def __str__(self):
- s = [
- 'protocol: ' + str(self.version),
- 'client private key: ' + ('present ' if self.private_key_file else 'not present'),
- 'client certificate: ' + ('present ' if self.certificate_file else 'not present'),
- 'private key password: ' + ('present ' if self.private_key_password else 'not present'),
- 'CA certificates file: ' + ('present ' if self.ca_certs_file else 'not present'),
- 'CA certificates path: ' + ('present ' if self.ca_certs_path else 'not present'),
- 'CA certificates data: ' + ('present ' if self.ca_certs_data else 'not present'),
- 'verify mode: ' + str(self.validate),
- 'valid names: ' + str(self.valid_names),
- 'ciphers: ' + str(self.ciphers),
- 'sni: ' + str(self.sni)
- ]
- return ' - '.join(s)
-
- def __repr__(self):
- r = '' if self.private_key_file is None else ', local_private_key_file={0.private_key_file!r}'.format(self)
- r += '' if self.certificate_file is None else ', local_certificate_file={0.certificate_file!r}'.format(self)
- r += '' if self.validate is None else ', validate={0.validate!r}'.format(self)
- r += '' if self.version is None else ', version={0.version!r}'.format(self)
- r += '' if self.ca_certs_file is None else ', ca_certs_file={0.ca_certs_file!r}'.format(self)
- r += '' if self.ca_certs_path is None else ', ca_certs_path={0.ca_certs_path!r}'.format(self)
- r += '' if self.ca_certs_data is None else ', ca_certs_data={0.ca_certs_data!r}'.format(self)
- r += '' if self.ciphers is None else ', ciphers={0.ciphers!r}'.format(self)
- r += '' if self.sni is None else ', sni={0.sni!r}'.format(self)
- r = 'Tls(' + r[2:] + ')'
- return r
-
- def wrap_socket(self, connection, do_handshake=False):
- """
- Adds TLS to the connection socket
- """
- if use_ssl_context:
- if self.version is None: # uses the default ssl context for reasonable security
- ssl_context = create_default_context(purpose=Purpose.SERVER_AUTH,
- cafile=self.ca_certs_file,
- capath=self.ca_certs_path,
- cadata=self.ca_certs_data)
- else: # code from create_default_context in the Python standard library 3.5.1, creates a ssl context with the specificd protocol version
- ssl_context = ssl.SSLContext(self.version)
- if self.ca_certs_file or self.ca_certs_path or self.ca_certs_data:
- ssl_context.load_verify_locations(self.ca_certs_file, self.ca_certs_path, self.ca_certs_data)
- elif self.validate != ssl.CERT_NONE:
- ssl_context.load_default_certs(Purpose.SERVER_AUTH)
-
- if self.certificate_file:
- ssl_context.load_cert_chain(self.certificate_file, keyfile=self.private_key_file, password=self.private_key_password)
- ssl_context.check_hostname = False
- ssl_context.verify_mode = self.validate
-
- if self.ciphers:
- try:
- ssl_context.set_ciphers(self.ciphers)
- except ssl.SSLError:
- pass
-
- if self.sni:
- wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake, server_hostname=self.sni)
- else:
- wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake)
- if log_enabled(NETWORK):
- log(NETWORK, 'socket wrapped with SSL using SSLContext for <%s>', connection)
- else:
- if self.version is None and hasattr(ssl, 'PROTOCOL_SSLv23'):
- self.version = ssl.PROTOCOL_SSLv23
- if self.ciphers:
- try:
-
- wrapped_socket = ssl.wrap_socket(connection.socket,
- keyfile=self.private_key_file,
- certfile=self.certificate_file,
- server_side=False,
- cert_reqs=self.validate,
- ssl_version=self.version,
- ca_certs=self.ca_certs_file,
- do_handshake_on_connect=do_handshake,
- ciphers=self.ciphers)
- except ssl.SSLError:
- raise
- except TypeError: # in python2.6 no ciphers argument is present, failback to self.ciphers=None
- self.ciphers = None
-
- if not self.ciphers:
- wrapped_socket = ssl.wrap_socket(connection.socket,
- keyfile=self.private_key_file,
- certfile=self.certificate_file,
- server_side=False,
- cert_reqs=self.validate,
- ssl_version=self.version,
- ca_certs=self.ca_certs_file,
- do_handshake_on_connect=do_handshake)
- if log_enabled(NETWORK):
- log(NETWORK, 'socket wrapped with SSL for <%s>', connection)
-
- if do_handshake and (self.validate == ssl.CERT_REQUIRED or self.validate == ssl.CERT_OPTIONAL):
- check_hostname(wrapped_socket, connection.server.host, self.valid_names)
-
- connection.socket = wrapped_socket
- return
-
- def start_tls(self, connection):
- if connection.server.ssl: # ssl already established at server level
- return False
-
- if (connection.tls_started and not connection._executing_deferred) or connection.strategy._outstanding or connection.sasl_in_progress:
- # Per RFC 4513 (3.1.1)
- if log_enabled(ERROR):
- log(ERROR, "can't start tls because operations are in progress for <%s>", self)
- return False
- connection.starting_tls = True
- if log_enabled(BASIC):
- log(BASIC, 'starting tls for <%s>', connection)
- if not connection.strategy.sync:
- connection._awaiting_for_async_start_tls = True # some flaky servers (OpenLDAP) doesn't return the extended response name in response
- result = connection.extended('1.3.6.1.4.1.1466.20037')
- if not connection.strategy.sync:
- # asynchronous - _start_tls must be executed by the strategy
- response = connection.get_response(result)
- if response != (None, None):
- if log_enabled(BASIC):
- log(BASIC, 'tls started for <%s>', connection)
- return True
- else:
- if log_enabled(BASIC):
- log(BASIC, 'tls not started for <%s>', connection)
- return False
- else:
- if connection.result['description'] not in ['success']:
- # startTLS failed
- connection.last_error = 'startTLS failed - ' + str(connection.result['description'])
- if log_enabled(ERROR):
- log(ERROR, '%s for <%s>', connection.last_error, connection)
- raise LDAPStartTLSError(connection.last_error)
- if log_enabled(BASIC):
- log(BASIC, 'tls started for <%s>', connection)
- return self._start_tls(connection)
-
- def _start_tls(self, connection):
- exc = None
- try:
- self.wrap_socket(connection, do_handshake=True)
- except Exception as e:
- connection.last_error = 'wrap socket error: ' + str(e)
- exc = e
-
- connection.starting_tls = False
-
- if exc:
- if log_enabled(ERROR):
- log(ERROR, 'error <%s> wrapping socket for TLS in <%s>', connection.last_error, connection)
- raise start_tls_exception_factory(LDAPStartTLSError, exc)(connection.last_error)
-
- if connection.usage:
- connection._usage.wrapped_sockets += 1
-
- connection.tls_started = True
- return True
-
-
-def check_hostname(sock, server_name, additional_names):
- server_certificate = sock.getpeercert()
- if log_enabled(NETWORK):
- log(NETWORK, 'certificate found for %s: %s', sock, server_certificate)
- if additional_names:
- host_names = [server_name] + (additional_names if isinstance(additional_names, SEQUENCE_TYPES) else [additional_names])
- else:
- host_names = [server_name]
-
- for host_name in host_names:
- if not host_name:
- continue
- elif host_name == '*':
- if log_enabled(NETWORK):
- log(NETWORK, 'certificate matches * wildcard')
- return # valid
-
- try:
- match_hostname(server_certificate, host_name) # raise CertificateError if certificate doesn't match server name
- if log_enabled(NETWORK):
- log(NETWORK, 'certificate matches host name <%s>', host_name)
- return # valid
- except CertificateError as e:
- if log_enabled(NETWORK):
- log(NETWORK, str(e))
-
- if log_enabled(ERROR):
- log(ERROR, "hostname doesn't match certificate")
- raise LDAPCertificateError("certificate %s doesn't match any name in %s " % (server_certificate, str(host_names)))
+"""
+"""
+
+# Created on 2013.08.05
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2013 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from .exceptions import LDAPSSLNotSupportedError, LDAPSSLConfigurationError, LDAPStartTLSError, LDAPCertificateError, start_tls_exception_factory
+from .. import SEQUENCE_TYPES
+from ..utils.log import log, log_enabled, ERROR, BASIC, NETWORK
+
+try:
+ # noinspection PyUnresolvedReferences
+ import ssl
+except ImportError:
+ if log_enabled(ERROR):
+ log(ERROR, 'SSL not supported in this Python interpreter')
+ raise LDAPSSLNotSupportedError('SSL not supported in this Python interpreter')
+
+try:
+ from ssl import match_hostname, CertificateError # backport for python2 missing ssl functionalities
+except ImportError:
+ from ..utils.tls_backport import CertificateError
+ from ..utils.tls_backport import match_hostname
+ if log_enabled(BASIC):
+ log(BASIC, 'using tls_backport')
+
+try: # try to use SSLContext
+ # noinspection PyUnresolvedReferences
+ from ssl import create_default_context, Purpose # defined in Python 3.4 and Python 2.7.9
+ use_ssl_context = True
+except ImportError:
+ use_ssl_context = False
+ if log_enabled(BASIC):
+ log(BASIC, 'SSLContext unavailable')
+
+from os import path
+
+
+# noinspection PyProtectedMember
+class Tls(object):
+ """
+    TLS/SSL configuration for the Server object.
+    Starting from Python 2.7.9 and Python 3.4 the SSLContext object is used;
+    it tries to read the CAs defined at the system level.
+ ca_certs_path and ca_certs_data are valid only when using SSLContext
+ local_private_key_password is valid only when using SSLContext
+ ssl_options is valid only when using SSLContext
+ sni is the server name for Server Name Indication (when available)
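+
+    Minimal usage sketch (host name and CA file path are placeholders):
+        tls = Tls(validate=ssl.CERT_REQUIRED, ca_certs_file='ca_certs.pem')
+        server = Server('ldap.example.com', use_ssl=True, tls=tls)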
+ """
+
+ def __init__(self,
+ local_private_key_file=None,
+ local_certificate_file=None,
+ validate=ssl.CERT_NONE,
+ version=None,
+ ssl_options=None,
+ ca_certs_file=None,
+ valid_names=None,
+ ca_certs_path=None,
+ ca_certs_data=None,
+ local_private_key_password=None,
+ ciphers=None,
+ sni=None):
+ if ssl_options is None:
+ ssl_options = []
+ self.ssl_options = ssl_options
+ if validate in [ssl.CERT_NONE, ssl.CERT_OPTIONAL, ssl.CERT_REQUIRED]:
+ self.validate = validate
+ elif validate:
+ if log_enabled(ERROR):
+ log(ERROR, 'invalid validate parameter <%s>', validate)
+ raise LDAPSSLConfigurationError('invalid validate parameter')
+ if ca_certs_file and path.exists(ca_certs_file):
+ self.ca_certs_file = ca_certs_file
+ elif ca_certs_file:
+ if log_enabled(ERROR):
+ log(ERROR, 'invalid CA public key file <%s>', ca_certs_file)
+ raise LDAPSSLConfigurationError('invalid CA public key file')
+ else:
+ self.ca_certs_file = None
+
+ if ca_certs_path and use_ssl_context and path.exists(ca_certs_path):
+ self.ca_certs_path = ca_certs_path
+ elif ca_certs_path and not use_ssl_context:
+ if log_enabled(ERROR):
+ log(ERROR, 'cannot use CA public keys path, SSLContext not available')
+ raise LDAPSSLNotSupportedError('cannot use CA public keys path, SSLContext not available')
+ elif ca_certs_path:
+ if log_enabled(ERROR):
+ log(ERROR, 'invalid CA public keys path <%s>', ca_certs_path)
+ raise LDAPSSLConfigurationError('invalid CA public keys path')
+ else:
+ self.ca_certs_path = None
+
+ if ca_certs_data and use_ssl_context:
+ self.ca_certs_data = ca_certs_data
+ elif ca_certs_data:
+ if log_enabled(ERROR):
+ log(ERROR, 'cannot use CA data, SSLContext not available')
+ raise LDAPSSLNotSupportedError('cannot use CA data, SSLContext not available')
+ else:
+ self.ca_certs_data = None
+
+ if local_private_key_password and use_ssl_context:
+ self.private_key_password = local_private_key_password
+ elif local_private_key_password:
+ if log_enabled(ERROR):
+ log(ERROR, 'cannot use local private key password, SSLContext not available')
+ raise LDAPSSLNotSupportedError('cannot use local private key password, SSLContext is not available')
+ else:
+ self.private_key_password = None
+
+ self.version = version
+ self.private_key_file = local_private_key_file
+ self.certificate_file = local_certificate_file
+ self.valid_names = valid_names
+ self.ciphers = ciphers
+ self.sni = sni
+
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated Tls: <%r>' % self)
+
+ def __str__(self):
+ s = [
+ 'protocol: ' + str(self.version),
+ 'client private key: ' + ('present ' if self.private_key_file else 'not present'),
+ 'client certificate: ' + ('present ' if self.certificate_file else 'not present'),
+ 'private key password: ' + ('present ' if self.private_key_password else 'not present'),
+ 'CA certificates file: ' + ('present ' if self.ca_certs_file else 'not present'),
+ 'CA certificates path: ' + ('present ' if self.ca_certs_path else 'not present'),
+ 'CA certificates data: ' + ('present ' if self.ca_certs_data else 'not present'),
+ 'verify mode: ' + str(self.validate),
+ 'valid names: ' + str(self.valid_names),
+ 'ciphers: ' + str(self.ciphers),
+ 'sni: ' + str(self.sni)
+ ]
+ return ' - '.join(s)
+
+ def __repr__(self):
+ r = '' if self.private_key_file is None else ', local_private_key_file={0.private_key_file!r}'.format(self)
+ r += '' if self.certificate_file is None else ', local_certificate_file={0.certificate_file!r}'.format(self)
+ r += '' if self.validate is None else ', validate={0.validate!r}'.format(self)
+ r += '' if self.version is None else ', version={0.version!r}'.format(self)
+ r += '' if self.ca_certs_file is None else ', ca_certs_file={0.ca_certs_file!r}'.format(self)
+ r += '' if self.ca_certs_path is None else ', ca_certs_path={0.ca_certs_path!r}'.format(self)
+ r += '' if self.ca_certs_data is None else ', ca_certs_data={0.ca_certs_data!r}'.format(self)
+ r += '' if self.ciphers is None else ', ciphers={0.ciphers!r}'.format(self)
+ r += '' if self.sni is None else ', sni={0.sni!r}'.format(self)
+ r = 'Tls(' + r[2:] + ')'
+ return r
+
+ def wrap_socket(self, connection, do_handshake=False):
+ """
+ Adds TLS to the connection socket
+ """
+ if use_ssl_context:
+ if self.version is None: # uses the default ssl context for reasonable security
+ ssl_context = create_default_context(purpose=Purpose.SERVER_AUTH,
+ cafile=self.ca_certs_file,
+ capath=self.ca_certs_path,
+ cadata=self.ca_certs_data)
+            else:  # code from create_default_context in the Python 3.5.1 standard library; creates an SSL context with the specified protocol version
+ ssl_context = ssl.SSLContext(self.version)
+ if self.ca_certs_file or self.ca_certs_path or self.ca_certs_data:
+ ssl_context.load_verify_locations(self.ca_certs_file, self.ca_certs_path, self.ca_certs_data)
+ elif self.validate != ssl.CERT_NONE:
+ ssl_context.load_default_certs(Purpose.SERVER_AUTH)
+
+ if self.certificate_file:
+ ssl_context.load_cert_chain(self.certificate_file, keyfile=self.private_key_file, password=self.private_key_password)
+ ssl_context.check_hostname = False
+ ssl_context.verify_mode = self.validate
+ for option in self.ssl_options:
+ ssl_context.options |= option
+
+ if self.ciphers:
+ try:
+ ssl_context.set_ciphers(self.ciphers)
+ except ssl.SSLError:
+ pass
+
+ if self.sni:
+ wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake, server_hostname=self.sni)
+ else:
+ wrapped_socket = ssl_context.wrap_socket(connection.socket, server_side=False, do_handshake_on_connect=do_handshake)
+ if log_enabled(NETWORK):
+ log(NETWORK, 'socket wrapped with SSL using SSLContext for <%s>', connection)
+ else:
+ if self.version is None and hasattr(ssl, 'PROTOCOL_SSLv23'):
+ self.version = ssl.PROTOCOL_SSLv23
+ if self.ciphers:
+ try:
+
+ wrapped_socket = ssl.wrap_socket(connection.socket,
+ keyfile=self.private_key_file,
+ certfile=self.certificate_file,
+ server_side=False,
+ cert_reqs=self.validate,
+ ssl_version=self.version,
+ ca_certs=self.ca_certs_file,
+ do_handshake_on_connect=do_handshake,
+ ciphers=self.ciphers)
+ except ssl.SSLError:
+ raise
+                except TypeError:  # Python 2.6 has no ciphers argument; fall back to self.ciphers=None
+ self.ciphers = None
+
+ if not self.ciphers:
+ wrapped_socket = ssl.wrap_socket(connection.socket,
+ keyfile=self.private_key_file,
+ certfile=self.certificate_file,
+ server_side=False,
+ cert_reqs=self.validate,
+ ssl_version=self.version,
+ ca_certs=self.ca_certs_file,
+ do_handshake_on_connect=do_handshake)
+ if log_enabled(NETWORK):
+ log(NETWORK, 'socket wrapped with SSL for <%s>', connection)
+
+ if do_handshake and (self.validate == ssl.CERT_REQUIRED or self.validate == ssl.CERT_OPTIONAL):
+ check_hostname(wrapped_socket, connection.server.host, self.valid_names)
+
+ connection.socket = wrapped_socket
+ return
+
+ def start_tls(self, connection):
+ if connection.server.ssl: # ssl already established at server level
+ return False
+
+ if (connection.tls_started and not connection._executing_deferred) or connection.strategy._outstanding or connection.sasl_in_progress:
+ # Per RFC 4513 (3.1.1)
+ if log_enabled(ERROR):
+ log(ERROR, "can't start tls because operations are in progress for <%s>", self)
+ return False
+ connection.starting_tls = True
+ if log_enabled(BASIC):
+ log(BASIC, 'starting tls for <%s>', connection)
+ if not connection.strategy.sync:
+            connection._awaiting_for_async_start_tls = True  # some flaky servers (OpenLDAP) don't return the extended response name in the response
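+        # 1.3.6.1.4.1.1466.20037 is the StartTLS extended operation OID (RFC 4511)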
+ result = connection.extended('1.3.6.1.4.1.1466.20037')
+ if not connection.strategy.sync:
+ # asynchronous - _start_tls must be executed by the strategy
+ response = connection.get_response(result)
+ if response != (None, None):
+ if log_enabled(BASIC):
+ log(BASIC, 'tls started for <%s>', connection)
+ return True
+ else:
+ if log_enabled(BASIC):
+ log(BASIC, 'tls not started for <%s>', connection)
+ return False
+ else:
+ if connection.result['description'] not in ['success']:
+ # startTLS failed
+ connection.last_error = 'startTLS failed - ' + str(connection.result['description'])
+ if log_enabled(ERROR):
+ log(ERROR, '%s for <%s>', connection.last_error, connection)
+ raise LDAPStartTLSError(connection.last_error)
+ if log_enabled(BASIC):
+ log(BASIC, 'tls started for <%s>', connection)
+ return self._start_tls(connection)
+
+ def _start_tls(self, connection):
+ try:
+ self.wrap_socket(connection, do_handshake=True)
+ except Exception as e:
+ connection.last_error = 'wrap socket error: ' + str(e)
+ if log_enabled(ERROR):
+ log(ERROR, 'error <%s> wrapping socket for TLS in <%s>', connection.last_error, connection)
+ raise start_tls_exception_factory(LDAPStartTLSError, e)(connection.last_error)
+ finally:
+ connection.starting_tls = False
+
+ if connection.usage:
+ connection._usage.wrapped_sockets += 1
+ connection.tls_started = True
+ return True
+
+
+def check_hostname(sock, server_name, additional_names):
+ server_certificate = sock.getpeercert()
+ if log_enabled(NETWORK):
+ log(NETWORK, 'certificate found for %s: %s', sock, server_certificate)
+ if additional_names:
+ host_names = [server_name] + (additional_names if isinstance(additional_names, SEQUENCE_TYPES) else [additional_names])
+ else:
+ host_names = [server_name]
+
+ for host_name in host_names:
+ if not host_name:
+ continue
+ elif host_name == '*':
+ if log_enabled(NETWORK):
+ log(NETWORK, 'certificate matches * wildcard')
+ return # valid
+
+ try:
+ match_hostname(server_certificate, host_name) # raise CertificateError if certificate doesn't match server name
+ if log_enabled(NETWORK):
+ log(NETWORK, 'certificate matches host name <%s>', host_name)
+ return # valid
+ except CertificateError as e:
+ if log_enabled(NETWORK):
+ log(NETWORK, str(e))
+
+ if log_enabled(ERROR):
+ log(ERROR, "hostname doesn't match certificate")
+ raise LDAPCertificateError("certificate %s doesn't match any name in %s " % (server_certificate, str(host_names)))
diff --git a/ldap3/core/usage.py b/ldap3/core/usage.py
index 187d415..7748c76 100644
--- a/ldap3/core/usage.py
+++ b/ldap3/core/usage.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/__init__.py b/ldap3/extend/__init__.py
index 24f426e..32795ef 100644
--- a/ldap3/extend/__init__.py
+++ b/ldap3/extend/__init__.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -169,6 +169,33 @@ class StandardExtendedOperations(ExtendedOperationContainer):
streaming,
callback)
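+    # funnel_search builds a PersistentSearch without the persistent-search control
+    # parameters (events_type, changes_only and notifications are passed as None), so
+    # no control is attached; its results are presumably read with PersistentSearch.funnel()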
+ def funnel_search(self,
+ search_base='',
+ search_filter='',
+ search_scope=SUBTREE,
+ dereference_aliases=DEREF_NEVER,
+ attributes=ALL_ATTRIBUTES,
+ size_limit=0,
+ time_limit=0,
+ controls=None,
+ streaming=False,
+ callback=None
+ ):
+ return PersistentSearch(self._connection,
+ search_base,
+ search_filter,
+ search_scope,
+ dereference_aliases,
+ attributes,
+ size_limit,
+ time_limit,
+ controls,
+ None,
+ None,
+ None,
+ streaming,
+ callback)
+
class NovellExtendedOperations(ExtendedOperationContainer):
def get_bind_dn(self, controls=None):
diff --git a/ldap3/extend/microsoft/addMembersToGroups.py b/ldap3/extend/microsoft/addMembersToGroups.py
index 28c409f..eaf6cfd 100644
--- a/ldap3/extend/microsoft/addMembersToGroups.py
+++ b/ldap3/extend/microsoft/addMembersToGroups.py
@@ -1,81 +1,93 @@
-"""
-"""
-
-# Created on 2016.12.26
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2016 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-from ...core.exceptions import LDAPInvalidDnError
-from ... import SEQUENCE_TYPES, MODIFY_ADD, BASE, DEREF_NEVER
-
-
-def ad_add_members_to_groups(connection,
- members_dn,
- groups_dn,
- fix=True):
- """
- :param connection: a bound Connection object
- :param members_dn: the list of members to add to groups
- :param groups_dn: the list of groups where members are to be added
- :param fix: checks for group existence and already assigned members
- :return: a boolean where True means that the operation was successful and False means an error has happened
- Establishes users-groups relations following the Active Directory rules: users are added to the member attribute of groups.
- Raises LDAPInvalidDnError if members or groups are not found in the DIT.
- """
-
- if not isinstance(members_dn, SEQUENCE_TYPES):
- members_dn = [members_dn]
-
- if not isinstance(groups_dn, SEQUENCE_TYPES):
- groups_dn = [groups_dn]
-
- error = False
- for group in groups_dn:
- if fix: # checks for existance of group and for already assigned members
- result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member'])
-
- if not connection.strategy.sync:
- response, result = connection.get_response(result)
- else:
- response, result = connection.response, connection.result
-
- if not result['description'] == 'success':
- raise LDAPInvalidDnError(group + ' not found')
-
- existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
- existing_members = [element.lower() for element in existing_members]
- else:
- existing_members = []
-
- changes = dict()
- member_to_add = [element for element in members_dn if element.lower() not in existing_members]
- if member_to_add:
- changes['member'] = (MODIFY_ADD, member_to_add)
- if changes:
- result = connection.modify(group, changes)
- if not connection.strategy.sync:
- _, result = connection.get_response(result)
- else:
- result = connection.result
- if result['description'] != 'success':
- error = True
- break
-
- return not error # returns True if no error is raised in the LDAP operations
+"""
+"""
+
+# Created on 2016.12.26
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2016 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from ... import SEQUENCE_TYPES, MODIFY_ADD, BASE, DEREF_NEVER
+from ...core.exceptions import LDAPInvalidDnError, LDAPOperationsErrorResult
+from ...utils.dn import safe_dn
+
+
+def ad_add_members_to_groups(connection,
+ members_dn,
+ groups_dn,
+ fix=True,
+ raise_error=False):
+ """
+ :param connection: a bound Connection object
+ :param members_dn: the list of members to add to groups
+ :param groups_dn: the list of groups where members are to be added
+ :param fix: checks for group existence and already assigned members
+    :param raise_error: if the operation fails, an exception is raised instead of returning False
+    :return: a boolean; True means the operation was successful, False means an error happened
+    Establishes user-group relations following the Active Directory rules: users are added to the member attribute of groups.
+ Raises LDAPInvalidDnError if members or groups are not found in the DIT.
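+
+    Illustrative call (connection and DNs are placeholders):
+        ad_add_members_to_groups(conn, 'cn=user1,ou=users,dc=example,dc=com',
+                                 'cn=group1,ou=groups,dc=example,dc=com')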
+ """
+
+ if not isinstance(members_dn, SEQUENCE_TYPES):
+ members_dn = [members_dn]
+
+ if not isinstance(groups_dn, SEQUENCE_TYPES):
+ groups_dn = [groups_dn]
+
+ if connection.check_names: # builds new lists with sanitized dn
+ members_dn = [safe_dn(member_dn) for member_dn in members_dn]
+ groups_dn = [safe_dn(group_dn) for group_dn in groups_dn]
+
+ error = False
+ for group in groups_dn:
+        if fix:  # checks for existence of the group and for already assigned members
+ result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER,
+ attributes=['member'])
+
+ if not connection.strategy.sync:
+ response, result = connection.get_response(result)
+ else:
+ response, result = connection.response, connection.result
+
+ if not result['description'] == 'success':
+ raise LDAPInvalidDnError(group + ' not found')
+
+ existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
+ existing_members = [element.lower() for element in existing_members]
+ else:
+ existing_members = []
+
+ changes = dict()
+ member_to_add = [element for element in members_dn if element.lower() not in existing_members]
+ if member_to_add:
+ changes['member'] = (MODIFY_ADD, member_to_add)
+ if changes:
+ result = connection.modify(group, changes)
+ if not connection.strategy.sync:
+ _, result = connection.get_response(result)
+ else:
+ result = connection.result
+ if result['description'] != 'success':
+ error = True
+ result_error_params = ['result', 'description', 'dn', 'message']
+ if raise_error:
+ raise LDAPOperationsErrorResult([(k, v) for k, v in result.items() if k in result_error_params])
+ break
+
+ return not error # returns True if no error is raised in the LDAP operations
diff --git a/ldap3/extend/microsoft/dirSync.py b/ldap3/extend/microsoft/dirSync.py
index cb18e7a..db403a1 100644
--- a/ldap3/extend/microsoft/dirSync.py
+++ b/ldap3/extend/microsoft/dirSync.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/microsoft/modifyPassword.py b/ldap3/extend/microsoft/modifyPassword.py
index 4a17fb0..0bf1c06 100644
--- a/ldap3/extend/microsoft/modifyPassword.py
+++ b/ldap3/extend/microsoft/modifyPassword.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/microsoft/removeMembersFromGroups.py b/ldap3/extend/microsoft/removeMembersFromGroups.py
index 1b7feb3..0998713 100644
--- a/ldap3/extend/microsoft/removeMembersFromGroups.py
+++ b/ldap3/extend/microsoft/removeMembersFromGroups.py
@@ -1,93 +1,92 @@
-"""
-"""
-
-# Created on 2016.12.26
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2016 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-from ...core.exceptions import LDAPInvalidDnError
-from ... import SEQUENCE_TYPES, MODIFY_DELETE, BASE, DEREF_NEVER
-from ...utils.dn import safe_dn
-
-
-def ad_remove_members_from_groups(connection,
- members_dn,
- groups_dn,
- fix):
- """
- :param connection: a bound Connection object
- :param members_dn: the list of members to remove from groups
- :param groups_dn: the list of groups where members are to be removed
- :param fix: checks for group existence and existing members
- :return: a boolean where True means that the operation was successful and False means an error has happened
- Removes users-groups relations following the Activwe Directory rules: users are removed from groups' member attribute
-
- """
- if not isinstance(members_dn, SEQUENCE_TYPES):
- members_dn = [members_dn]
-
- if not isinstance(groups_dn, SEQUENCE_TYPES):
- groups_dn = [groups_dn]
-
- if connection.check_names: # builds new lists with sanitized dn
- safe_members_dn = []
- safe_groups_dn = []
- for member_dn in members_dn:
- safe_members_dn.append(safe_dn(member_dn))
- for group_dn in groups_dn:
- safe_groups_dn.append(safe_dn(group_dn))
-
- members_dn = safe_members_dn
- groups_dn = safe_groups_dn
-
- error = False
-
- for group in groups_dn:
- if fix: # checks for existance of group and for already assigned members
- result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member'])
-
- if not connection.strategy.sync:
- response, result = connection.get_response(result)
- else:
- response, result = connection.response, connection.result
-
- if not result['description'] == 'success':
- raise LDAPInvalidDnError(group + ' not found')
-
- existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
- else:
- existing_members = members_dn
-
- existing_members = [element.lower() for element in existing_members]
- changes = dict()
- member_to_remove = [element for element in members_dn if element.lower() in existing_members]
- if member_to_remove:
- changes['member'] = (MODIFY_DELETE, member_to_remove)
- if changes:
- result = connection.modify(group, changes)
- if not connection.strategy.sync:
- _, result = connection.get_response(result)
- else:
- result = connection.result
- if result['description'] != 'success':
- error = True
- break
-
- return not error
+"""
+"""
+
+# Created on 2016.12.26
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2016 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from ...core.exceptions import LDAPInvalidDnError, LDAPOperationsErrorResult
+from ... import SEQUENCE_TYPES, MODIFY_DELETE, BASE, DEREF_NEVER
+from ...utils.dn import safe_dn
+
+
+def ad_remove_members_from_groups(connection,
+ members_dn,
+ groups_dn,
+ fix,
+ raise_error=False):
+ """
+ :param connection: a bound Connection object
+ :param members_dn: the list of members to remove from groups
+ :param groups_dn: the list of groups where members are to be removed
+ :param fix: checks for group existence and existing members
+    :param raise_error: if the operation fails, an exception is raised instead of returning False
+    :return: a boolean; True means the operation was successful, False means an error happened
+    Removes user-group relations following the Active Directory rules: users are removed from the groups' member attribute.
+
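+    Illustrative call (connection and DNs are placeholders):
+        ad_remove_members_from_groups(conn, 'cn=user1,ou=users,dc=example,dc=com',
+                                      'cn=group1,ou=groups,dc=example,dc=com', fix=True)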
+ """
+ if not isinstance(members_dn, SEQUENCE_TYPES):
+ members_dn = [members_dn]
+
+ if not isinstance(groups_dn, SEQUENCE_TYPES):
+ groups_dn = [groups_dn]
+
+ if connection.check_names: # builds new lists with sanitized dn
+ members_dn = [safe_dn(member_dn) for member_dn in members_dn]
+ groups_dn = [safe_dn(group_dn) for group_dn in groups_dn]
+
+ error = False
+
+ for group in groups_dn:
+        if fix:  # checks for existence of the group and for already assigned members
+ result = connection.search(group, '(objectclass=*)', BASE, dereference_aliases=DEREF_NEVER, attributes=['member'])
+
+ if not connection.strategy.sync:
+ response, result = connection.get_response(result)
+ else:
+ response, result = connection.response, connection.result
+
+ if not result['description'] == 'success':
+ raise LDAPInvalidDnError(group + ' not found')
+
+ existing_members = response[0]['attributes']['member'] if 'member' in response[0]['attributes'] else []
+ else:
+ existing_members = members_dn
+
+ existing_members = [element.lower() for element in existing_members]
+ changes = dict()
+ member_to_remove = [element for element in members_dn if element.lower() in existing_members]
+ if member_to_remove:
+ changes['member'] = (MODIFY_DELETE, member_to_remove)
+ if changes:
+ result = connection.modify(group, changes)
+ if not connection.strategy.sync:
+ _, result = connection.get_response(result)
+ else:
+ result = connection.result
+ if result['description'] != 'success':
+ error = True
+ result_error_params = ['result', 'description', 'dn', 'message']
+ if raise_error:
+ raise LDAPOperationsErrorResult([(k, v) for k, v in result.items() if k in result_error_params])
+ break
+
+ return not error
diff --git a/ldap3/extend/microsoft/unlockAccount.py b/ldap3/extend/microsoft/unlockAccount.py
index 60b9ed3..bc59b58 100644
--- a/ldap3/extend/microsoft/unlockAccount.py
+++ b/ldap3/extend/microsoft/unlockAccount.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -34,7 +34,7 @@ def ad_unlock_account(connection, user_dn, controls=None):
if connection.check_names:
user_dn = safe_dn(user_dn)
result = connection.modify(user_dn,
- {'lockoutTime': [(MODIFY_REPLACE, [0])]},
+ {'lockoutTime': [(MODIFY_REPLACE, ['0'])]},
controls)
if not connection.strategy.sync:
diff --git a/ldap3/extend/novell/addMembersToGroups.py b/ldap3/extend/novell/addMembersToGroups.py
index 5583549..d649dc8 100644
--- a/ldap3/extend/novell/addMembersToGroups.py
+++ b/ldap3/extend/novell/addMembersToGroups.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/novell/checkGroupsMemberships.py b/ldap3/extend/novell/checkGroupsMemberships.py
index 1013fde..c51dbf2 100644
--- a/ldap3/extend/novell/checkGroupsMemberships.py
+++ b/ldap3/extend/novell/checkGroupsMemberships.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/novell/endTransaction.py b/ldap3/extend/novell/endTransaction.py
index 0e9a58c..18bc041 100644
--- a/ldap3/extend/novell/endTransaction.py
+++ b/ldap3/extend/novell/endTransaction.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/novell/getBindDn.py b/ldap3/extend/novell/getBindDn.py
index 39fae2b..492bcdd 100644
--- a/ldap3/extend/novell/getBindDn.py
+++ b/ldap3/extend/novell/getBindDn.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/novell/listReplicas.py b/ldap3/extend/novell/listReplicas.py
index fdc6d08..8ccf2ff 100644
--- a/ldap3/extend/novell/listReplicas.py
+++ b/ldap3/extend/novell/listReplicas.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -45,6 +45,6 @@ class ListReplicas(ExtendedOperation):
def populate_result(self):
try:
- self.result['replicas'] = str(self.decoded_response['replicaList']) if self.decoded_response['replicaList'] else None
+ self.result['replicas'] = [str(replica) for replica in self.decoded_response] if self.decoded_response else None
except TypeError:
self.result['replicas'] = None
diff --git a/ldap3/extend/novell/nmasGetUniversalPassword.py b/ldap3/extend/novell/nmasGetUniversalPassword.py
index 20aa928..291ae92 100644
--- a/ldap3/extend/novell/nmasGetUniversalPassword.py
+++ b/ldap3/extend/novell/nmasGetUniversalPassword.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -46,9 +46,11 @@ class NmasGetUniversalPassword(ExtendedOperation):
self.request_value['reqdn'] = user
def populate_result(self):
- self.result['nmasver'] = int(self.decoded_response['nmasver'])
- self.result['error'] = int(self.decoded_response['err'])
- try:
- self.result['password'] = str(self.decoded_response['passwd']) if self.decoded_response['passwd'] else None
- except TypeError:
- self.result['password'] = None
+ if self.decoded_response:
+ self.result['nmasver'] = int(self.decoded_response['nmasver'])
+ self.result['error'] = int(self.decoded_response['err'])
+ try:
+
+ self.result['password'] = str(self.decoded_response['passwd']) if self.decoded_response['passwd'].hasValue() else None
+ except TypeError:
+ self.result['password'] = None
diff --git a/ldap3/extend/novell/nmasSetUniversalPassword.py b/ldap3/extend/novell/nmasSetUniversalPassword.py
index 65ea0d6..dadab59 100644
--- a/ldap3/extend/novell/nmasSetUniversalPassword.py
+++ b/ldap3/extend/novell/nmasSetUniversalPassword.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/novell/partition_entry_count.py b/ldap3/extend/novell/partition_entry_count.py
index 8218aea..3d46c7a 100644
--- a/ldap3/extend/novell/partition_entry_count.py
+++ b/ldap3/extend/novell/partition_entry_count.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/novell/removeMembersFromGroups.py b/ldap3/extend/novell/removeMembersFromGroups.py
index df493ba..c46c275 100644
--- a/ldap3/extend/novell/removeMembersFromGroups.py
+++ b/ldap3/extend/novell/removeMembersFromGroups.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/novell/replicaInfo.py b/ldap3/extend/novell/replicaInfo.py
index 45bd0e9..057f934 100644
--- a/ldap3/extend/novell/replicaInfo.py
+++ b/ldap3/extend/novell/replicaInfo.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/novell/startTransaction.py b/ldap3/extend/novell/startTransaction.py
index 2ed21c2..6179cb0 100644
--- a/ldap3/extend/novell/startTransaction.py
+++ b/ldap3/extend/novell/startTransaction.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/operation.py b/ldap3/extend/operation.py
index 9906885..c1d478c 100644
--- a/ldap3/extend/operation.py
+++ b/ldap3/extend/operation.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/extend/standard/PagedSearch.py b/ldap3/extend/standard/PagedSearch.py
index 6fb1a56..f8bc7e6 100644
--- a/ldap3/extend/standard/PagedSearch.py
+++ b/ldap3/extend/standard/PagedSearch.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -25,7 +25,7 @@
from ... import SUBTREE, DEREF_ALWAYS
from ...utils.dn import safe_dn
-from ...core.results import DO_NOT_RAISE_EXCEPTIONS
+from ...core.results import DO_NOT_RAISE_EXCEPTIONS, RESULT_SIZE_LIMIT_EXCEEDED
from ...core.exceptions import LDAPOperationResult
from ...utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED
@@ -47,7 +47,11 @@ def paged_search_generator(connection,
search_base = safe_dn(search_base)
responses = []
- cookie = True # performs search at least one time
+ original_connection = None
+ original_auto_referrals = connection.auto_referrals
+    connection.auto_referrals = False  # disable automatic referral following because it cannot handle paged searches
+ cookie = True # performs search operation at least one time
+ cachekey = None # for referrals cache
while cookie:
result = connection.search(search_base,
search_filter,
@@ -69,10 +73,11 @@ def paged_search_generator(connection,
response = connection.response
result = connection.result
- if result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'paged search operation result <%s> for <%s>', result, connection)
- raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
+        if result['referrals'] and original_auto_referrals:  # if referrals are returned, start the loop over with a new connection to the referral
+ if not original_connection:
+ original_connection = connection
+            _, connection, cachekey = connection.strategy.create_referral_connection(result['referrals'])  # switch the connection to a valid referral
+ continue
responses.extend(response)
try:
@@ -80,9 +85,25 @@ def paged_search_generator(connection,
except KeyError:
cookie = None
+ if connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'paged search operation result <%s> for <%s>', result, connection)
+ if result['result'] == RESULT_SIZE_LIMIT_EXCEEDED:
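+                # drain the entries collected so far before raising the size-limit exception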
+ while responses:
+ yield responses.pop()
+ raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
+
while responses:
yield responses.pop()
+ if original_connection:
+ connection = original_connection
+ if connection.use_referral_cache and cachekey:
+ connection.strategy.referral_cache[cachekey] = connection
+ else:
+ connection.unbind()
+
+ connection.auto_referrals = original_auto_referrals
connection.response = None
diff --git a/ldap3/extend/standard/PersistentSearch.py b/ldap3/extend/standard/PersistentSearch.py
index 62286e1..b25ec68 100644
--- a/ldap3/extend/standard/PersistentSearch.py
+++ b/ldap3/extend/standard/PersistentSearch.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -80,7 +80,8 @@ class PersistentSearch(object):
else:
self.controls = controls
- self.controls.append(persistent_search_control(events_type, changes_only, notifications))
+ if events_type and changes_only and notifications:
+ self.controls.append(persistent_search_control(events_type, changes_only, notifications))
self.start()
def start(self):
@@ -101,9 +102,10 @@ class PersistentSearch(object):
controls=self.controls)
self.connection.strategy.persistent_search_message_id = self.message_id
- def stop(self):
+ def stop(self, unbind=True):
self.connection.abandon(self.message_id)
- self.connection.unbind()
+ if unbind:
+ self.connection.unbind()
if self.message_id in self.connection.strategy._responses:
del self.connection.strategy._responses[self.message_id]
if hasattr(self.connection.strategy, '_requests') and self.message_id in self.connection.strategy._requests: # asynchronous strategy has a dict of request that could be returned by get_response()
@@ -111,11 +113,25 @@ class PersistentSearch(object):
self.connection.strategy.persistent_search_message_id = None
self.message_id = None
- def next(self):
+ def next(self, block=False, timeout=None):
if not self.connection.strategy.streaming and not self.connection.strategy.callback:
try:
- return self.connection.strategy.events.get_nowait()
+ return self.connection.strategy.events.get(block, timeout)
except Empty:
return None
raise LDAPExtensionError('Persistent search is not accumulating events in queue')
+
+ def funnel(self, block=False, timeout=None):
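+        # Generator variant of next(): yields searchResEntry messages as they arrive
+        # and stops after yielding the first non-entry message (e.g. searchResDone)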
+ done = False
+ while not done:
+ try:
+ entry = self.connection.strategy.events.get(block, timeout)
+ except Empty:
+ yield None
+ if entry['type'] == 'searchResEntry':
+ yield entry
+ else:
+ done = True
+
+ yield entry
diff --git a/ldap3/extend/standard/modifyPassword.py b/ldap3/extend/standard/modifyPassword.py
index 167816e..7837355 100644
--- a/ldap3/extend/standard/modifyPassword.py
+++ b/ldap3/extend/standard/modifyPassword.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -67,6 +67,6 @@ class ModifyPassword(ExtendedOperation):
self.result[self.response_attribute] = True
else: # change was not successful, raises exception if raise_exception = True in connection or returns the operation result, error code is in result['result']
self.result[self.response_attribute] = False
- if not self.connection.raise_exceptions:
+ if self.connection.raise_exceptions:
from ...core.exceptions import LDAPOperationResult
raise LDAPOperationResult(result=self.result['result'], description=self.result['description'], dn=self.result['dn'], message=self.result['message'], response_type=self.result['type'])
diff --git a/ldap3/extend/standard/whoAmI.py b/ldap3/extend/standard/whoAmI.py
index 121e40b..a6c08a8 100644
--- a/ldap3/extend/standard/whoAmI.py
+++ b/ldap3/extend/standard/whoAmI.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -24,10 +24,10 @@
# If not, see <http://www.gnu.org/licenses/>.
# implements RFC4532
-
from ...extend.operation import ExtendedOperation
from ...utils.conv import to_unicode
+
class WhoAmI(ExtendedOperation):
def config(self):
self.request_name = '1.3.6.1.4.1.4203.1.11.3'
diff --git a/ldap3/operation/abandon.py b/ldap3/operation/abandon.py
index ccc3e88..66fcb6c 100644
--- a/ldap3/operation/abandon.py
+++ b/ldap3/operation/abandon.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/operation/add.py b/ldap3/operation/add.py
index a08e463..d0b95b4 100644
--- a/ldap3/operation/add.py
+++ b/ldap3/operation/add.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/operation/bind.py b/ldap3/operation/bind.py
index a74b04a..43ad1fb 100644
--- a/ldap3/operation/bind.py
+++ b/ldap3/operation/bind.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -25,7 +25,7 @@
from .. import SIMPLE, ANONYMOUS, SASL, STRING_TYPES
from ..core.results import RESULT_CODES
-from ..core.exceptions import LDAPPasswordIsMandatoryError, LDAPUnknownAuthenticationMethodError, LDAPUserNameNotAllowedError
+from ..core.exceptions import LDAPUserNameIsMandatoryError, LDAPPasswordIsMandatoryError, LDAPUnknownAuthenticationMethodError, LDAPUserNameNotAllowedError
from ..protocol.sasl.sasl import validate_simple_password
from ..protocol.rfc4511 import Version, AuthenticationChoice, Simple, BindRequest, ResultCode, SaslCredentials, BindResponse, \
LDAPDN, LDAPString, Referral, ServerSaslCreds, SicilyPackageDiscovery, SicilyNegotiate, SicilyResponse
@@ -52,7 +52,7 @@ def bind_operation(version,
request['name'] = to_unicode(name) if auto_encode else name
if authentication == SIMPLE:
if not name:
- raise LDAPPasswordIsMandatoryError('user name is mandatory in simple bind')
+ raise LDAPUserNameIsMandatoryError('user name is mandatory in simple bind')
if password:
request['authentication'] = AuthenticationChoice().setComponentByName('simple', Simple(validate_simple_password(password)))
else:
@@ -122,7 +122,7 @@ def bind_response_to_dict(response):
'description': ResultCode().getNamedValues().getName(response['resultCode']),
'dn': str(response['matchedDN']),
'message': str(response['diagnosticMessage']),
- 'referrals': referrals_to_list(response['referral']),
+ 'referrals': referrals_to_list(response['referral']) if response['referral'] is not None and response['referral'].hasValue() else [],
'saslCreds': bytes(response['serverSaslCreds']) if response['serverSaslCreds'] is not None and response['serverSaslCreds'].hasValue() else None}
diff --git a/ldap3/operation/compare.py b/ldap3/operation/compare.py
index 5ee03d5..2232f61 100644
--- a/ldap3/operation/compare.py
+++ b/ldap3/operation/compare.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/operation/delete.py b/ldap3/operation/delete.py
index df0aee8..2db40f4 100644
--- a/ldap3/operation/delete.py
+++ b/ldap3/operation/delete.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/operation/extended.py b/ldap3/operation/extended.py
index a80eb7d..4b1ebc7 100644
--- a/ldap3/operation/extended.py
+++ b/ldap3/operation/extended.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -66,7 +66,7 @@ def extended_response_to_dict(response):
'message': str(response['diagnosticMessage']),
'description': ResultCode().getNamedValues().getName(response['resultCode']),
'referrals': referrals_to_list(response['referral']),
- 'responseName': str(response['responseName']) if response['responseName'] else None,
+ 'responseName': str(response['responseName']) if response['responseName'] is not None and response['responseName'].hasValue() else str(),
'responseValue': bytes(response['responseValue']) if response['responseValue'] is not None and response['responseValue'].hasValue() else bytes()}
diff --git a/ldap3/operation/modify.py b/ldap3/operation/modify.py
index 363e1ef..31867e9 100644
--- a/ldap3/operation/modify.py
+++ b/ldap3/operation/modify.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/operation/modifyDn.py b/ldap3/operation/modifyDn.py
index 174bb36..73c6da3 100644
--- a/ldap3/operation/modifyDn.py
+++ b/ldap3/operation/modifyDn.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/operation/search.py b/ldap3/operation/search.py
index 2d4f07b..b78d86d 100644
--- a/ldap3/operation/search.py
+++ b/ldap3/operation/search.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -38,7 +38,7 @@ from ..operation.bind import referrals_to_list
from ..protocol.convert import ava_to_dict, attributes_to_list, search_refs_to_list, validate_assertion_value, prepare_filter_for_sending, search_refs_to_list_fast
from ..protocol.formatters.standard import format_attribute_values
from ..utils.conv import to_unicode, to_raw
-
+from pyasn1.error import PyAsn1UnicodeDecodeError
ROOT = 0
AND = 1
@@ -83,7 +83,7 @@ class FilterNode(object):
return representation
-def evaluate_match(match, schema, auto_escape, auto_encode, check_names):
+def evaluate_match(match, schema, auto_escape, auto_encode, validator, check_names):
left_part, equal_sign, right_part = match.strip().partition('=')
if not equal_sign:
raise LDAPInvalidFilterError('invalid matching assertion')
@@ -91,17 +91,17 @@ def evaluate_match(match, schema, auto_escape, auto_encode, check_names):
tag = MATCH_APPROX
left_part = left_part[:-1].strip()
right_part = right_part.strip()
- assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, check_names)}
+ assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, validator, check_names)}
elif left_part.endswith('>'): # greater or equal match '>='
tag = MATCH_GREATER_OR_EQUAL
left_part = left_part[:-1].strip()
right_part = right_part.strip()
- assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, check_names)}
+ assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, validator, check_names)}
elif left_part.endswith('<'): # less or equal match '<='
tag = MATCH_LESS_OR_EQUAL
left_part = left_part[:-1].strip()
right_part = right_part.strip()
- assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, check_names)}
+ assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, validator, check_names)}
elif left_part.endswith(':'): # extensible match ':='
tag = MATCH_EXTENSIBLE
left_part = left_part[:-1].strip()
@@ -138,7 +138,7 @@ def evaluate_match(match, schema, auto_escape, auto_encode, check_names):
raise LDAPInvalidFilterError('invalid extensible filter')
attribute_name = attribute_name.strip() if attribute_name else False
matching_rule = matching_rule.strip() if matching_rule else False
- assertion = {'attr': attribute_name, 'value': validate_assertion_value(schema, attribute_name, right_part, auto_escape, auto_encode, check_names), 'matchingRule': matching_rule, 'dnAttributes': dn_attributes}
+ assertion = {'attr': attribute_name, 'value': validate_assertion_value(schema, attribute_name, right_part, auto_escape, auto_encode, validator, check_names), 'matchingRule': matching_rule, 'dnAttributes': dn_attributes}
elif right_part == '*': # attribute present match '=*'
tag = MATCH_PRESENT
left_part = left_part.strip()
@@ -148,9 +148,9 @@ def evaluate_match(match, schema, auto_escape, auto_encode, check_names):
left_part = left_part.strip()
right_part = right_part.strip()
substrings = right_part.split('*')
- initial = validate_assertion_value(schema, left_part, substrings[0], auto_escape, auto_encode, check_names) if substrings[0] else None
- final = validate_assertion_value(schema, left_part, substrings[-1], auto_escape, auto_encode, check_names) if substrings[-1] else None
- any_string = [validate_assertion_value(schema, left_part, substring, auto_escape, auto_encode, check_names) for substring in substrings[1:-1] if substring]
+ initial = validate_assertion_value(schema, left_part, substrings[0], auto_escape, auto_encode, validator, check_names) if substrings[0] else None
+ final = validate_assertion_value(schema, left_part, substrings[-1], auto_escape, auto_encode, validator, check_names) if substrings[-1] else None
+ any_string = [validate_assertion_value(schema, left_part, substring, auto_escape, auto_encode, validator, check_names) for substring in substrings[1:-1] if substring]
#assertion = {'attr': left_part, 'initial': initial, 'any': any_string, 'final': final}
assertion = {'attr': left_part}
if initial:
@@ -163,13 +163,13 @@ def evaluate_match(match, schema, auto_escape, auto_encode, check_names):
tag = MATCH_EQUAL
left_part = left_part.strip()
right_part = right_part.strip()
- assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, check_names)}
+ assertion = {'attr': left_part, 'value': validate_assertion_value(schema, left_part, right_part, auto_escape, auto_encode, validator, check_names)}
return FilterNode(tag, assertion)
-def parse_filter(search_filter, schema, auto_escape, auto_encode, check_names):
- if str != bytes and isinstance(search_filter, bytes): # python 3 with byte filter
+def parse_filter(search_filter, schema, auto_escape, auto_encode, validator, check_names):
+ if str is not bytes and isinstance(search_filter, bytes): # python 3 with byte filter
search_filter = to_unicode(search_filter)
search_filter = search_filter.strip()
if search_filter and search_filter.count('(') == search_filter.count(')') and search_filter.startswith('(') and search_filter.endswith(')'):
@@ -203,7 +203,7 @@ def parse_filter(search_filter, schema, auto_escape, auto_encode, check_names):
if start_pos:
if current_node.tag == NOT and len(current_node.elements) > 0:
raise LDAPInvalidFilterError('NOT (!) clause in filter cannot be multiple')
- current_node.append(evaluate_match(search_filter[start_pos:end_pos], schema, auto_escape, auto_encode, check_names))
+ current_node.append(evaluate_match(search_filter[start_pos:end_pos], schema, auto_escape, auto_encode, validator, check_names))
start_pos = None
state = SEARCH_OPEN_OR_CLOSE
elif (state == SEARCH_MATCH_OR_CLOSE or state == SEARCH_MATCH_OR_CONTROL) and c not in '()':
@@ -324,6 +324,7 @@ def search_operation(search_base,
auto_escape,
auto_encode,
schema=None,
+ validator=None,
check_names=False):
# SearchRequest ::= [APPLICATION 3] SEQUENCE {
# baseObject LDAPDN,
@@ -368,7 +369,7 @@ def search_operation(search_base,
request['sizeLimit'] = Integer0ToMax(size_limit)
request['timeLimit'] = Integer0ToMax(time_limit)
request['typesOnly'] = TypesOnly(True) if types_only else TypesOnly(False)
- request['filter'] = compile_filter(parse_filter(search_filter, schema, auto_escape, auto_encode, check_names).elements[0]) # parse the searchFilter string and compile it starting from the root node
+ request['filter'] = compile_filter(parse_filter(search_filter, schema, auto_escape, auto_encode, validator, check_names).elements[0]) # parse the searchFilter string and compile it starting from the root node
if not isinstance(attributes, SEQUENCE_TYPES):
attributes = [NO_ATTRIBUTES]
@@ -378,8 +379,10 @@ def search_operation(search_base,
def decode_vals(vals):
- return [str(val) for val in vals if val] if vals else None
-
+ try:
+ return [str(val) for val in vals if val] if vals else None
+ except PyAsn1UnicodeDecodeError:
+ return decode_raw_vals(vals)
def decode_vals_fast(vals):
try:
@@ -392,8 +395,7 @@ def attributes_to_dict(attribute_list):
conf_case_insensitive_attributes = get_config_parameter('CASE_INSENSITIVE_ATTRIBUTE_NAMES')
attributes = CaseInsensitiveDict() if conf_case_insensitive_attributes else dict()
for attribute in attribute_list:
- attributes[str(attribute['type'])] = decode_vals(attribute['vals'])
-
+ attributes[str(attribute['type'])] = decode_vals(attribute['vals'])
return attributes
@@ -501,8 +503,11 @@ def filter_to_string(filter_object):
filter_string += matching_rule_assertion_to_string(filter_object['extensibleMatch'])
else:
raise LDAPInvalidFilterError('error converting filter to string')
-
filter_string += ')'
+
+ if str is bytes: # Python2, forces conversion to Unicode
+ filter_string = to_unicode(filter_string)
+
return filter_string
@@ -521,10 +526,11 @@ def search_result_entry_response_to_dict(response, schema, custom_formatter, che
entry = dict()
# entry['dn'] = str(response['object'])
if response['object']:
- entry['raw_dn'] = to_raw(response['object'])
if isinstance(response['object'], STRING_TYPES): # mock strategies return string not a PyAsn1 object
+ entry['raw_dn'] = to_raw(response['object'])
entry['dn'] = to_unicode(response['object'])
else:
+ entry['raw_dn'] = str(response['object'])
entry['dn'] = to_unicode(bytes(response['object']), from_server=True)
else:
entry['raw_dn'] = b''
@@ -551,6 +557,8 @@ def search_result_done_response_to_dict(response):
result['controls'][control[0]] = control[1]
return result
+
+
def search_result_reference_response_to_dict(response):
return {'uri': search_refs_to_list(response)}
diff --git a/ldap3/operation/unbind.py b/ldap3/operation/unbind.py
index 6f1e713..4d418fb 100644
--- a/ldap3/operation/unbind.py
+++ b/ldap3/operation/unbind.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/controls.py b/ldap3/protocol/controls.py
index 197777e..658867b 100644
--- a/ldap3/protocol/controls.py
+++ b/ldap3/protocol/controls.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/convert.py b/ldap3/protocol/convert.py
index 8e40f1f..af3a6f8 100644
--- a/ldap3/protocol/convert.py
+++ b/ldap3/protocol/convert.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -32,7 +32,10 @@ from ..protocol.formatters.standard import find_attribute_validator
def attribute_to_dict(attribute):
- return {'type': str(attribute['type']), 'values': [str(val) for val in attribute['vals']]}
+ try:
+ return {'type': str(attribute['type']), 'values': [str(val) for val in attribute['vals']]}
+ except PyAsn1Error: # invalid encoding, return bytes value
+ return {'type': str(attribute['type']), 'values': [bytes(val) for val in attribute['vals']]}
def attributes_to_dict(attributes):
@@ -44,7 +47,10 @@ def attributes_to_dict(attributes):
def referrals_to_list(referrals):
- return [str(referral) for referral in referrals if referral] if referrals else None
+ if isinstance(referrals, list):
+ return [str(referral) for referral in referrals if referral] if referrals else None
+ else:
+ return [str(referral) for referral in referrals if referral] if referrals is not None and referrals.hasValue() else None
def search_refs_to_list(search_refs):
@@ -85,8 +91,12 @@ def attributes_to_list(attributes):
def ava_to_dict(ava):
try:
return {'attribute': str(ava['attributeDesc']), 'value': escape_filter_chars(str(ava['assertionValue']))}
- except PyAsn1Error: # invalid encoding, return bytes value
- return {'attribute': str(ava['attributeDesc']), 'value': escape_filter_chars(str(bytes(ava['assertionValue'])))}
+ except Exception: # invalid encoding, return bytes value
+ try:
+ return {'attribute': str(ava['attributeDesc']), 'value': escape_filter_chars(bytes(ava['assertionValue']))}
+ except Exception:
+ return {'attribute': str(ava['attributeDesc']), 'value': bytes(ava['assertionValue'])}
+
def substring_to_dict(substring):
return {'initial': substring['initial'] if substring['initial'] else '', 'any': [middle for middle in substring['any']] if substring['any'] else '', 'final': substring['final'] if substring['final'] else ''}
@@ -131,12 +141,12 @@ def build_controls_list(controls):
return built_controls
-def validate_assertion_value(schema, name, value, auto_escape, auto_encode, check_names):
+def validate_assertion_value(schema, name, value, auto_escape, auto_encode, validator, check_names):
value = to_unicode(value)
if auto_escape:
if '\\' in value and not is_filter_escaped(value):
value = escape_filter_chars(value)
- value = validate_attribute_value(schema, name, value, auto_encode, check_names=check_names)
+ value = validate_attribute_value(schema, name, value, auto_encode, validator=validator, check_names=check_names)
return value
@@ -157,6 +167,13 @@ def validate_attribute_value(schema, name, value, auto_encode, validator=None, c
validator = find_attribute_validator(schema, name, validator)
validated = validator(value)
if validated is False:
+ try: # checks if the value is a byte value erroneously converted to a string (as "b'1234'"), this is a common case in Python 3 when encoding is not specified
+ if value[0:2] == "b'" and value[-1] == "'":
+ value = to_raw(value[2:-1])
+ validated = validator(value)
+ except Exception:
+ raise LDAPInvalidValueError('value \'%s\' non valid for attribute \'%s\'' % (value, name))
+ if validated is False:
raise LDAPInvalidValueError('value \'%s\' non valid for attribute \'%s\'' % (value, name))
elif validated is not True: # a valid LDAP value equivalent to the actual value
value = validated
@@ -171,7 +188,7 @@ def prepare_filter_for_sending(raw_string):
ints = []
raw_string = to_raw(raw_string)
while i < len(raw_string):
- if (raw_string[i] == 92 or raw_string[i] == '\\') and i < len(raw_string) - 2: # 92 is backslash
+ if (raw_string[i] == 92 or raw_string[i] == '\\') and i < len(raw_string) - 2: # 92 (0x5C) is backslash
try:
ints.append(int(raw_string[i + 1: i + 3], 16))
i += 2
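
The fallback added to validate_attribute_value above targets a common Python 3 mistake: passing str(some_bytes), which produces a literal such as "b'1234'" instead of the intended text. A small, self-contained sketch of that detection (the helper name is illustrative, not part of ldap3):

# Illustrative only: detect a bytes value that was stringified as "b'1234'"
# and recover the raw bytes before re-running the validator, mirroring the
# check added to validate_attribute_value above.
def recover_stringified_bytes(value):
    if isinstance(value, str) and value[0:2] == "b'" and value[-1] == "'":
        return value[2:-1].encode('latin-1')  # strip the b'...' wrapper
    return value

assert recover_stringified_bytes(str(b'1234')) == b'1234'
assert recover_stringified_bytes('plain text') == 'plain text'
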
diff --git a/ldap3/protocol/formatters/formatters.py b/ldap3/protocol/formatters/formatters.py
index de23225..2638d52 100644
--- a/ldap3/protocol/formatters/formatters.py
+++ b/ldap3/protocol/formatters/formatters.py
@@ -1,287 +1,436 @@
-"""
-"""
-
-# Created on 2014.10.28
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from binascii import hexlify
-from uuid import UUID
-from datetime import datetime, timedelta
-
-from ...core.timezone import OffsetTzInfo
-
-
-def format_unicode(raw_value):
- try:
- if str is not bytes: # Python 3
- return str(raw_value, 'utf-8', errors='strict')
- else: # Python 2
- return unicode(raw_value, 'utf-8', errors='strict')
- except (TypeError, UnicodeDecodeError):
- pass
-
- return raw_value
-
-
-def format_integer(raw_value):
- try:
- return int(raw_value)
- except (TypeError, ValueError):
- pass
-
- return raw_value
-
-
-def format_binary(raw_value):
- try:
- return bytes(raw_value)
- except TypeError:
- pass
-
- return raw_value
-
-
-def format_uuid(raw_value):
- try:
- return str(UUID(bytes=raw_value))
- except (TypeError, ValueError):
- return format_unicode(raw_value)
- except Exception:
- pass
-
- return raw_value
-
-
-def format_uuid_le(raw_value):
- try:
- return str(UUID(bytes_le=raw_value))
- except (TypeError, ValueError):
- return format_unicode(raw_value)
- except Exception:
- pass
-
- return raw_value
-
-
-def format_boolean(raw_value):
- if raw_value in [b'TRUE', b'true', b'True']:
- return True
- if raw_value in [b'FALSE', b'false', b'False']:
- return False
-
- return raw_value
-
-
-def format_ad_timestamp(raw_value):
- """
- Active Directory stores date/time values as the number of 100-nanosecond intervals
- that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
- The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
- """
- if raw_value == b'9223372036854775807': # max value to be stored in a 64 bit signed int
- return datetime.max # returns datetime.datetime(9999, 12, 31, 23, 59, 59, 999999)
- timestamp = int(raw_value)
- try:
- return datetime.fromtimestamp(timestamp / 10000000.0 - 11644473600, tz=OffsetTzInfo(0, 'UTC')) # forces true division in python 2
- except (OSError, OverflowError, ValueError): # on Windows backwards timestamps are not allowed
- unix_epoch = datetime.fromtimestamp(0, tz=OffsetTzInfo(0, 'UTC'))
- diff_seconds = timedelta(seconds=timestamp/10000000.0 - 11644473600)
- return unix_epoch + diff_seconds
- except Exception as e:
- pass
-
- return raw_value
-
-
-def format_time(raw_value):
- """
- """
-
- '''
- From RFC4517:
- A value of the Generalized Time syntax is a character string
- representing a date and time. The LDAP-specific encoding of a value
- of this syntax is a restriction of the format defined in [ISO8601],
- and is described by the following ABNF:
-
- GeneralizedTime = century year month day hour
- [ minute [ second / leap-second ] ]
- [ fraction ]
- g-time-zone
-
- century = 2(%x30-39) ; "00" to "99"
- year = 2(%x30-39) ; "00" to "99"
- month = ( %x30 %x31-39 ) ; "01" (January) to "09"
- / ( %x31 %x30-32 ) ; "10" to "12"
- day = ( %x30 %x31-39 ) ; "01" to "09"
- / ( %x31-32 %x30-39 ) ; "10" to "29"
- / ( %x33 %x30-31 ) ; "30" to "31"
- hour = ( %x30-31 %x30-39 ) / ( %x32 %x30-33 ) ; "00" to "23"
- minute = %x30-35 %x30-39 ; "00" to "59"
- second = ( %x30-35 %x30-39 ) ; "00" to "59"
- leap-second = ( %x36 %x30 ) ; "60"
- fraction = ( DOT / COMMA ) 1*(%x30-39)
- g-time-zone = %x5A ; "Z"
- / g-differential
- g-differential = ( MINUS / PLUS ) hour [ minute ]
- MINUS = %x2D ; minus sign ("-")
- '''
- # if len(raw_value) < 10 or not all((c in b'0123456789+-,.Z' for c in raw_value)) or (b'Z' in raw_value and not raw_value.endswith(b'Z')): # first ten characters are mandatory and must be numeric or timezone or fraction
- if len(raw_value) < 10 or not all((c in b'0123456789+-,.Z' for c in raw_value)) or (b'Z' in raw_value and not raw_value.endswith(b'Z')): # first ten characters are mandatory and must be numeric or timezone or fraction
- return raw_value
-
- # sets position for fixed values
-
- year = int(raw_value[0: 4])
- month = int(raw_value[4: 6])
- day = int(raw_value[6: 8])
- hour = int(raw_value[8: 10])
- minute = 0
- second = 0
- microsecond = 0
-
- remain = raw_value[10:]
- if remain and remain.endswith(b'Z'): # uppercase 'Z'
- sep = b'Z'
- elif b'+' in remain: # timezone can be specified with +hh[mm] or -hh[mm]
- sep = b'+'
- elif b'-' in remain:
- sep = b'-'
- else: # timezone not specified
- return raw_value
-
- time, _, offset = remain.partition(sep)
-
- if time and (b'.' in time or b',' in time):
- # fraction time
- if time[0] in b',.':
- minute = 6 * int(time[1] if str is bytes else chr(time[1])) # Python 2 / Python 3
- elif time[2] in b',.':
- minute = int(raw_value[10: 12])
- second = 6 * int(time[3] if str is bytes else chr(time[3])) # Python 2 / Python 3
- elif time[4] in b',.':
- minute = int(raw_value[10: 12])
- second = int(raw_value[12: 14])
- microsecond = 100000 * int(time[5] if str is bytes else chr(time[5])) # Python 2 / Python 3
- elif len(time) == 2: # mmZ format
- minute = int(raw_value[10: 12])
- elif len(time) == 0: # Z format
- pass
- elif len(time) == 4: # mmssZ
- minute = int(raw_value[10: 12])
- second = int(raw_value[12: 14])
- else:
- return raw_value
-
- if sep == b'Z': # UTC
- timezone = OffsetTzInfo(0, 'UTC')
- else: # build timezone
- try:
- if len(offset) == 2:
- timezone_hour = int(offset[:2])
- timezone_minute = 0
- elif len(offset) == 4:
- timezone_hour = int(offset[:2])
- timezone_minute = int(offset[2:4])
- else: # malformed timezone
- raise ValueError
- except ValueError:
- return raw_value
- if timezone_hour > 23 or timezone_minute > 59: # invalid timezone
- return raw_value
-
- if str is not bytes: # Python 3
- timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1), 'UTC' + str(sep + offset, encoding='utf-8'))
- else: # Python 2
- timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1), unicode('UTC' + sep + offset, encoding='utf-8'))
-
- try:
- return datetime(year=year,
- month=month,
- day=day,
- hour=hour,
- minute=minute,
- second=second,
- microsecond=microsecond,
- tzinfo=timezone)
- except (TypeError, ValueError):
- pass
-
- return raw_value
-
-
-def format_sid(raw_value):
- """
- """
- '''
- SID= "S-1-" IdentifierAuthority 1*SubAuthority
- IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex
- ; If the identifier authority is < 2^32, the
- ; identifier authority is represented as a decimal
- ; number
- ; If the identifier authority is >= 2^32,
- ; the identifier authority is represented in
- ; hexadecimal
- IdentifierAuthorityDec = 1*10DIGIT
- ; IdentifierAuthorityDec, top level authority of a
- ; security identifier is represented as a decimal number
- IdentifierAuthorityHex = "0x" 12HEXDIG
- ; IdentifierAuthorityHex, the top-level authority of a
- ; security identifier is represented as a hexadecimal number
- SubAuthority= "-" 1*10DIGIT
- ; Sub-Authority is always represented as a decimal number
- ; No leading "0" characters are allowed when IdentifierAuthority
- ; or SubAuthority is represented as a decimal number
- ; All hexadecimal digits must be output in string format,
- ; pre-pended by "0x"
-
- Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01.
- SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15.
- IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority.
- SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount.
- '''
-
- if str is not bytes: # Python 3
- revision = int(raw_value[0])
- sub_authority_count = int(raw_value[1])
- identifier_authority = int.from_bytes(raw_value[2:8], byteorder='big')
- if identifier_authority >= 4294967296: # 2 ^ 32
- identifier_authority = hex(identifier_authority)
-
- sub_authority = ''
- i = 0
- while i < sub_authority_count:
- sub_authority += '-' + str(int.from_bytes(raw_value[8 + (i * 4): 12 + (i * 4)], byteorder='little')) # little endian
- i += 1
- else: # Python 2
- revision = int(ord(raw_value[0]))
- sub_authority_count = int(ord(raw_value[1]))
- identifier_authority = int(hexlify(raw_value[2:8]), 16)
- if identifier_authority >= 4294967296: # 2 ^ 32
- identifier_authority = hex(identifier_authority)
-
- sub_authority = ''
- i = 0
- while i < sub_authority_count:
- sub_authority += '-' + str(int(hexlify(raw_value[11 + (i * 4): 7 + (i * 4): -1]), 16)) # little endian
- i += 1
- return 'S-' + str(revision) + '-' + str(identifier_authority) + sub_authority
+"""
+"""
+
+# Created on 2014.10.28
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+import re
+
+from binascii import hexlify
+from uuid import UUID
+from datetime import datetime, timedelta
+from ...utils.conv import to_unicode
+
+from ...core.timezone import OffsetTzInfo
+
+
+def format_unicode(raw_value):
+ try:
+ if str is not bytes: # Python 3
+ return str(raw_value, 'utf-8', errors='strict')
+ else: # Python 2
+ return unicode(raw_value, 'utf-8', errors='strict')
+ except (TypeError, UnicodeDecodeError):
+ pass
+
+ return raw_value
+
+
+def format_integer(raw_value):
+ try:
+ return int(raw_value)
+ except (TypeError, ValueError): # expected exceptions
+ pass
+ except Exception: # any other exception should be investigated, anyway the formatter returns the raw_value
+ pass
+
+ return raw_value
+
+
+def format_binary(raw_value):
+ try:
+ return bytes(raw_value)
+ except TypeError: # expected exceptions
+ pass
+ except Exception: # any other exception should be investigated, anyway the formatter returns the raw_value
+ pass
+
+ return raw_value
+
+
+def format_uuid(raw_value):
+ try:
+ return str(UUID(bytes=raw_value))
+ except (TypeError, ValueError):
+ return format_unicode(raw_value)
+ except Exception: # any other exception should be investigated, anyway the formatter returns the raw_value
+ pass
+
+ return raw_value
+
+
+def format_uuid_le(raw_value):
+ try:
+ return '{' + str(UUID(bytes_le=raw_value)) + '}'
+ except (TypeError, ValueError):
+ return format_unicode(raw_value)
+ except Exception: # any other exception should be investigated, anyway the formatter returns the raw_value
+ pass
+
+ return raw_value
+
+
+def format_boolean(raw_value):
+ if raw_value in [b'TRUE', b'true', b'True']:
+ return True
+ if raw_value in [b'FALSE', b'false', b'False']:
+ return False
+
+ return raw_value
+
+
+def format_ad_timestamp(raw_value):
+ """
+ Active Directory stores date/time values as the number of 100-nanosecond intervals
+ that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
+ The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
+ """
+ utc_timezone = OffsetTzInfo(0, 'UTC')
+ if raw_value == b'9223372036854775807': # max value to be stored in a 64 bit signed int
+ return datetime.max.replace(tzinfo=utc_timezone) # returns datetime.datetime(9999, 12, 31, 23, 59, 59, 999999, tzinfo=OffsetTzInfo(offset=0, name='UTC'))
+ try:
+ timestamp = int(raw_value)
+ if timestamp < 0: # ad timestamp cannot be negative
+ timestamp = timestamp * -1
+ except Exception:
+ return raw_value
+
+ try:
+ return datetime.fromtimestamp(timestamp / 10000000.0 - 11644473600,
+ tz=utc_timezone) # forces true division in python 2
+ except (OSError, OverflowError, ValueError): # on Windows backwards timestamps are not allowed
+ try:
+ unix_epoch = datetime.fromtimestamp(0, tz=utc_timezone)
+ diff_seconds = timedelta(seconds=timestamp / 10000000.0 - 11644473600)
+ return unix_epoch + diff_seconds
+ except Exception:
+ pass
+ except Exception:
+ pass
+
+ return raw_value
+
+
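
The conversion in format_ad_timestamp divides the 100-nanosecond tick count by 10**7 to obtain seconds and subtracts 11644473600, the number of seconds between 1601-01-01 and the Unix epoch. A quick arithmetic check of those constants:

# 116444736000000000 ticks of 100 ns each is exactly 11644473600 seconds
# after 1601-01-01, i.e. the Unix epoch itself.
ticks_at_unix_epoch = 116444736000000000
assert ticks_at_unix_epoch / 10000000.0 - 11644473600 == 0.0

# One day later: 86400 s = 864000000000 ticks.
assert (ticks_at_unix_epoch + 864000000000) / 10000000.0 - 11644473600 == 86400.0
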
+try: # uses regular expressions and the timezone class (python3.2 and later)
+ from datetime import timezone
+
+ time_format = re.compile(
+ r'''
+ ^
+ (?P<Year>[0-9]{4})
+ (?P<Month>0[1-9]|1[0-2])
+ (?P<Day>0[1-9]|[12][0-9]|3[01])
+ (?P<Hour>[01][0-9]|2[0-3])
+ (?:
+ (?P<Minute>[0-5][0-9])
+ (?P<Second>[0-5][0-9]|60)?
+ )?
+ (?:
+ [.,]
+ (?P<Fraction>[0-9]+)
+ )?
+ (?:
+ Z
+ |
+ (?:
+ (?P<Offset>[+-])
+ (?P<OffHour>[01][0-9]|2[0-3])
+ (?P<OffMinute>[0-5][0-9])?
+ )
+ )
+ $
+ ''',
+ re.VERBOSE
+ )
+
+
+ def format_time(raw_value):
+ try:
+ match = time_format.fullmatch(to_unicode(raw_value))
+ if match is None:
+ return raw_value
+ matches = match.groupdict()
+
+ offset = timedelta(
+ hours=int(matches['OffHour'] or 0),
+ minutes=int(matches['OffMinute'] or 0)
+ )
+
+ if matches['Offset'] == '-':
+ offset *= -1
+
+ # Python does not support leap second in datetime (!)
+ if matches['Second'] == '60':
+ matches['Second'] = '59'
+
+ # According to RFC, fraction may be applied to an Hour/Minute (!)
+ fraction = float('0.' + (matches['Fraction'] or '0'))
+
+ if matches['Minute'] is None:
+ fraction *= 60
+ minute = int(fraction)
+ fraction -= minute
+ else:
+ minute = int(matches['Minute'])
+
+ if matches['Second'] is None:
+ fraction *= 60
+ second = int(fraction)
+ fraction -= second
+ else:
+ second = int(matches['Second'])
+
+ microseconds = int(fraction * 1000000)
+
+ return datetime(
+ int(matches['Year']),
+ int(matches['Month']),
+ int(matches['Day']),
+ int(matches['Hour']),
+ minute,
+ second,
+ microseconds,
+ timezone(offset),
+ )
+ except Exception: # exceptions should be investigated, anyway the formatter returns the raw_value
+ pass
+ return raw_value
+
+except ImportError:
+ def format_time(raw_value):
+ """
+ From RFC4517:
+ A value of the Generalized Time syntax is a character string
+ representing a date and time. The LDAP-specific encoding of a value
+ of this syntax is a restriction of the format defined in [ISO8601],
+ and is described by the following ABNF:
+
+ GeneralizedTime = century year month day hour
+ [ minute [ second / leap-second ] ]
+ [ fraction ]
+ g-time-zone
+
+ century = 2(%x30-39) ; "00" to "99"
+ year = 2(%x30-39) ; "00" to "99"
+ month = ( %x30 %x31-39 ) ; "01" (January) to "09"
+ / ( %x31 %x30-32 ) ; "10" to "12"
+ day = ( %x30 %x31-39 ) ; "01" to "09"
+ / ( %x31-32 %x30-39 ) ; "10" to "29"
+ / ( %x33 %x30-31 ) ; "30" to "31"
+ hour = ( %x30-31 %x30-39 ) / ( %x32 %x30-33 ) ; "00" to "23"
+ minute = %x30-35 %x30-39 ; "00" to "59"
+ second = ( %x30-35 %x30-39 ) ; "00" to "59"
+ leap-second = ( %x36 %x30 ) ; "60"
+ fraction = ( DOT / COMMA ) 1*(%x30-39)
+ g-time-zone = %x5A ; "Z"
+ / g-differential
+ g-differential = ( MINUS / PLUS ) hour [ minute ]
+ MINUS = %x2D ; minus sign ("-")
+ """
+
+ if len(raw_value) < 10 or not all((c in b'0123456789+-,.Z' for c in raw_value)) or (
+ b'Z' in raw_value and not raw_value.endswith(
+ b'Z')): # first ten characters are mandatory and must be numeric or timezone or fraction
+ return raw_value
+
+ # sets position for fixed values
+ year = int(raw_value[0: 4])
+ month = int(raw_value[4: 6])
+ day = int(raw_value[6: 8])
+ hour = int(raw_value[8: 10])
+ minute = 0
+ second = 0
+ microsecond = 0
+
+ remain = raw_value[10:]
+ if remain and remain.endswith(b'Z'): # uppercase 'Z'
+ sep = b'Z'
+ elif b'+' in remain: # timezone can be specified with +hh[mm] or -hh[mm]
+ sep = b'+'
+ elif b'-' in remain:
+ sep = b'-'
+ else: # timezone not specified
+ return raw_value
+
+ time, _, offset = remain.partition(sep)
+
+ if time and (b'.' in time or b',' in time):
+ # fraction time
+ if time[0] in b',.':
+ minute = 6 * int(time[1] if str is bytes else chr(time[1])) # Python 2 / Python 3
+ elif time[2] in b',.':
+ minute = int(raw_value[10: 12])
+ second = 6 * int(time[3] if str is bytes else chr(time[3])) # Python 2 / Python 3
+ elif time[4] in b',.':
+ minute = int(raw_value[10: 12])
+ second = int(raw_value[12: 14])
+ microsecond = 100000 * int(time[5] if str is bytes else chr(time[5])) # Python 2 / Python 3
+ elif len(time) == 2: # mmZ format
+ minute = int(raw_value[10: 12])
+ elif len(time) == 0: # Z format
+ pass
+ elif len(time) == 4: # mmssZ
+ minute = int(raw_value[10: 12])
+ second = int(raw_value[12: 14])
+ else:
+ return raw_value
+
+ if sep == b'Z': # UTC
+ timezone = OffsetTzInfo(0, 'UTC')
+ else: # build timezone
+ try:
+ if len(offset) == 2:
+ timezone_hour = int(offset[:2])
+ timezone_minute = 0
+ elif len(offset) == 4:
+ timezone_hour = int(offset[:2])
+ timezone_minute = int(offset[2:4])
+ else: # malformed timezone
+ raise ValueError
+ except ValueError:
+ return raw_value
+ if timezone_hour > 23 or timezone_minute > 59: # invalid timezone
+ return raw_value
+
+ if str is not bytes: # Python 3
+ timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1),
+ 'UTC' + str(sep + offset, encoding='utf-8'))
+ else: # Python 2
+ timezone = OffsetTzInfo((timezone_hour * 60 + timezone_minute) * (1 if sep == b'+' else -1),
+ unicode('UTC' + sep + offset, encoding='utf-8'))
+
+ try:
+ return datetime(year=year,
+ month=month,
+ day=day,
+ hour=hour,
+ minute=minute,
+ second=second,
+ microsecond=microsecond,
+ tzinfo=timezone)
+ except (TypeError, ValueError):
+ pass
+
+ return raw_value
+
+
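
A quick usage check of the regex-based format_time branch defined above, assuming a recent Python 3 interpreter where that branch is the one in use (the sample timestamps are illustrative):

# Sanity check of the Generalized Time parser defined above.
from datetime import datetime, timedelta, timezone
from ldap3.protocol.formatters.formatters import format_time

assert format_time(b'20200102030405Z') == datetime(2020, 1, 2, 3, 4, 5, tzinfo=timezone.utc)
assert format_time(b'20200102030405+0200') == datetime(2020, 1, 2, 3, 4, 5,
                                                       tzinfo=timezone(timedelta(hours=2)))
assert format_time(b'not a time') == b'not a time'   # unparsable values fall back to the raw bytes
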
+def format_ad_timedelta(raw_value):
+ """
+ Convert a negative filetime value to a timedelta.
+ """
+ # Active Directory stores attributes like "minPwdAge" as a negative
+ # "filetime" timestamp, which is the number of 100-nanosecond intervals that
+ # have elapsed since the 0 hour on January 1, 1601.
+ #
+ # Handle the minimum value that can be stored in a 64 bit signed integer.
+ # See https://docs.microsoft.com/en-us/dotnet/api/system.int64.minvalue
+ # In attributes like "maxPwdAge", this signifies never.
+ if raw_value == b'-9223372036854775808':
+ return timedelta.max
+ # We can reuse format_ad_timestamp to get a datetime object from the
+ # timestamp. Afterwards, we can subtract a datetime representing 0 hour on
+ # January 1, 1601 from the returned datetime to get the timedelta.
+ return format_ad_timestamp(raw_value) - format_ad_timestamp(0)
+
+
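
For format_ad_timedelta, a worked example under the interpretation given above: a 42-day maxPwdAge (the usual AD default) is stored as the negative tick count -36288000000000, i.e. 42 days * 86400 s * 10**7 ticks per second, negated:

# Worked example: a 42-day maxPwdAge stored as a negative filetime interval.
from datetime import timedelta
from ldap3.protocol.formatters.formatters import format_ad_timedelta

assert 42 * 86400 * 10**7 == 36288000000000
assert format_ad_timedelta(b'-36288000000000') == timedelta(days=42)
assert format_ad_timedelta(b'-9223372036854775808') == timedelta.max   # "never"
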
+def format_time_with_0_year(raw_value):
+ try:
+ if raw_value.startswith(b'0000'):
+ return raw_value
+ except Exception:
+ try:
+ if raw_value.startswith('0000'):
+ return raw_value
+ except Exception:
+ pass
+
+ return format_time(raw_value)
+
+
+def format_sid(raw_value):
+ """
+ SID= "S-1-" IdentifierAuthority 1*SubAuthority
+ IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex
+ ; If the identifier authority is < 2^32, the
+ ; identifier authority is represented as a decimal
+ ; number
+ ; If the identifier authority is >= 2^32,
+ ; the identifier authority is represented in
+ ; hexadecimal
+ IdentifierAuthorityDec = 1*10DIGIT
+ ; IdentifierAuthorityDec, top level authority of a
+ ; security identifier is represented as a decimal number
+ IdentifierAuthorityHex = "0x" 12HEXDIG
+ ; IdentifierAuthorityHex, the top-level authority of a
+ ; security identifier is represented as a hexadecimal number
+ SubAuthority= "-" 1*10DIGIT
+ ; Sub-Authority is always represented as a decimal number
+ ; No leading "0" characters are allowed when IdentifierAuthority
+ ; or SubAuthority is represented as a decimal number
+ ; All hexadecimal digits must be output in string format,
+ ; pre-pended by "0x"
+
+ Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01.
+ SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15.
+ IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority.
+ SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount.
+ """
+ try:
+ if raw_value.startswith(b'S-1-'):
+ return raw_value
+ except Exception:
+ try:
+ if raw_value.startswith('S-1-'):
+ return raw_value
+ except Exception:
+ pass
+ try:
+ if str is not bytes: # Python 3
+ revision = int(raw_value[0])
+ sub_authority_count = int(raw_value[1])
+ identifier_authority = int.from_bytes(raw_value[2:8], byteorder='big')
+ if identifier_authority >= 4294967296: # 2 ^ 32
+ identifier_authority = hex(identifier_authority)
+
+ sub_authority = ''
+ i = 0
+ while i < sub_authority_count:
+ sub_authority += '-' + str(
+ int.from_bytes(raw_value[8 + (i * 4): 12 + (i * 4)], byteorder='little')) # little endian
+ i += 1
+ else: # Python 2
+ revision = int(ord(raw_value[0]))
+ sub_authority_count = int(ord(raw_value[1]))
+ identifier_authority = int(hexlify(raw_value[2:8]), 16)
+ if identifier_authority >= 4294967296: # 2 ^ 32
+ identifier_authority = hex(identifier_authority)
+
+ sub_authority = ''
+ i = 0
+ while i < sub_authority_count:
+ sub_authority += '-' + str(int(hexlify(raw_value[11 + (i * 4): 7 + (i * 4): -1]), 16)) # little endian
+ i += 1
+ return 'S-' + str(revision) + '-' + str(identifier_authority) + sub_authority
+ except Exception: # any exception should be investigated, anyway the formatter returns the raw_value
+ pass
+
+ return raw_value
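
A worked example of the SID layout described in the docstring, using the well-known BUILTIN\Administrators SID S-1-5-32-544: revision 1, two sub-authorities, identifier authority 5 stored big-endian in six bytes, and each sub-authority a little-endian 32-bit integer:

# Decoding the binary form of the well-known SID S-1-5-32-544.
from ldap3.protocol.formatters.formatters import format_sid

raw_sid = (b'\x01'                       # revision 1
           b'\x02'                       # two sub-authorities
           b'\x00\x00\x00\x00\x00\x05'   # identifier authority 5, big-endian
           b'\x20\x00\x00\x00'           # sub-authority 32, little-endian
           b'\x20\x02\x00\x00')          # sub-authority 544 (0x220), little-endian
assert format_sid(raw_sid) == 'S-1-5-32-544'
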
diff --git a/ldap3/protocol/formatters/standard.py b/ldap3/protocol/formatters/standard.py
index e6488e9..42f6c26 100644
--- a/ldap3/protocol/formatters/standard.py
+++ b/ldap3/protocol/formatters/standard.py
@@ -1,227 +1,238 @@
-"""
-"""
-
-# Created on 2014.10.28
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from ... import SEQUENCE_TYPES
-from .formatters import format_ad_timestamp, format_binary, format_boolean,\
- format_integer, format_sid, format_time, format_unicode, format_uuid, format_uuid_le
-from .validators import validate_integer, validate_time, always_valid,\
- validate_generic_single_value, validate_boolean, validate_ad_timestamp,\
- validate_uuid_le, validate_uuid, validate_minus_one
-
-# for each syntax can be specified a format function and a input validation function
-
-standard_formatter = {
- '1.2.840.113556.1.4.903': (format_binary, None), # Object (DN-binary) - Microsoft
- '1.2.840.113556.1.4.904': (format_unicode, None), # Object (DN-string) - Microsoft
- '1.2.840.113556.1.4.905': (format_unicode, None), # String (Teletex) - Microsoft
- '1.2.840.113556.1.4.906': (format_integer, validate_integer), # Large integer - Microsoft
- '1.2.840.113556.1.4.907': (format_binary, None), # String (NT-sec-desc) - Microsoft
- '1.2.840.113556.1.4.1221': (format_binary, None), # Object (OR-name) - Microsoft
- '1.2.840.113556.1.4.1362': (format_unicode, None), # String (Case) - Microsoft
- '1.3.6.1.4.1.1466.115.121.1.1': (format_binary, None), # ACI item [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.2': (format_binary, None), # Access point [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.3': (format_unicode, None), # Attribute type description
- '1.3.6.1.4.1.1466.115.121.1.4': (format_binary, None), # Audio [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.5': (format_binary, None), # Binary [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.6': (format_unicode, None), # Bit String
- '1.3.6.1.4.1.1466.115.121.1.7': (format_boolean, validate_boolean), # Boolean
- '1.3.6.1.4.1.1466.115.121.1.8': (format_binary, None), # Certificate [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.9': (format_binary, None), # Certificate List [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.10': (format_binary, None), # Certificate Pair [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.11': (format_unicode, None), # Country String
- '1.3.6.1.4.1.1466.115.121.1.12': (format_unicode, None), # Distinguished name (DN)
- '1.3.6.1.4.1.1466.115.121.1.13': (format_binary, None), # Data Quality Syntax [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.14': (format_unicode, None), # Delivery method
- '1.3.6.1.4.1.1466.115.121.1.15': (format_unicode, None), # Directory string
- '1.3.6.1.4.1.1466.115.121.1.16': (format_unicode, None), # DIT Content Rule Description
- '1.3.6.1.4.1.1466.115.121.1.17': (format_unicode, None), # DIT Structure Rule Description
- '1.3.6.1.4.1.1466.115.121.1.18': (format_binary, None), # DL Submit Permission [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.19': (format_binary, None), # DSA Quality Syntax [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.20': (format_binary, None), # DSE Type [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.21': (format_binary, None), # Enhanced Guide
- '1.3.6.1.4.1.1466.115.121.1.22': (format_unicode, None), # Facsimile Telephone Number
- '1.3.6.1.4.1.1466.115.121.1.23': (format_binary, None), # Fax
- '1.3.6.1.4.1.1466.115.121.1.24': (format_time, validate_time), # Generalized time
- '1.3.6.1.4.1.1466.115.121.1.25': (format_binary, None), # Guide [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.26': (format_unicode, None), # IA5 string
- '1.3.6.1.4.1.1466.115.121.1.27': (format_integer, validate_integer), # Integer
- '1.3.6.1.4.1.1466.115.121.1.28': (format_binary, None), # JPEG
- '1.3.6.1.4.1.1466.115.121.1.29': (format_binary, None), # Master and Shadow Access Points [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.30': (format_unicode, None), # Matching rule description
- '1.3.6.1.4.1.1466.115.121.1.31': (format_unicode, None), # Matching rule use description
- '1.3.6.1.4.1.1466.115.121.1.32': (format_unicode, None), # Mail Preference [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.33': (format_unicode, None), # MHS OR Address [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.34': (format_unicode, None), # Name and optional UID
- '1.3.6.1.4.1.1466.115.121.1.35': (format_unicode, None), # Name form description
- '1.3.6.1.4.1.1466.115.121.1.36': (format_unicode, None), # Numeric string
- '1.3.6.1.4.1.1466.115.121.1.37': (format_unicode, None), # Object class description
- '1.3.6.1.4.1.1466.115.121.1.38': (format_unicode, None), # OID
- '1.3.6.1.4.1.1466.115.121.1.39': (format_unicode, None), # Other mailbox
- '1.3.6.1.4.1.1466.115.121.1.40': (format_binary, None), # Octet string
- '1.3.6.1.4.1.1466.115.121.1.41': (format_unicode, None), # Postal address
- '1.3.6.1.4.1.1466.115.121.1.42': (format_binary, None), # Protocol Information [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.43': (format_binary, None), # Presentation Address [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.44': (format_unicode, None), # Printable string
- '1.3.6.1.4.1.1466.115.121.1.45': (format_binary, None), # Subtree specification [OBSOLETE
- '1.3.6.1.4.1.1466.115.121.1.46': (format_binary, None), # Supplier Information [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.47': (format_binary, None), # Supplier Or Consumer [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.48': (format_binary, None), # Supplier And Consumer [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.49': (format_binary, None), # Supported Algorithm [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.50': (format_unicode, None), # Telephone number
- '1.3.6.1.4.1.1466.115.121.1.51': (format_unicode, None), # Teletex terminal identifier
- '1.3.6.1.4.1.1466.115.121.1.52': (format_unicode, None), # Teletex number
- '1.3.6.1.4.1.1466.115.121.1.53': (format_time, validate_time), # Utc time (deprecated)
- '1.3.6.1.4.1.1466.115.121.1.54': (format_unicode, None), # LDAP syntax description
- '1.3.6.1.4.1.1466.115.121.1.55': (format_binary, None), # Modify rights [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.56': (format_binary, None), # LDAP Schema Definition [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.57': (format_unicode, None), # LDAP Schema Description [OBSOLETE]
- '1.3.6.1.4.1.1466.115.121.1.58': (format_unicode, None), # Substring assertion
- '1.3.6.1.1.16.1': (format_uuid, validate_uuid), # UUID
- '2.16.840.1.113719.1.1.4.1.501': (format_uuid, None), # GUID (Novell)
- '2.16.840.1.113719.1.1.5.1.0': (format_binary, None), # Unknown (Novell)
- '2.16.840.1.113719.1.1.5.1.6': (format_unicode, None), # Case Ignore List (Novell)
- '2.16.840.1.113719.1.1.5.1.12': (format_binary, None), # Tagged Data (Novell)
- '2.16.840.1.113719.1.1.5.1.13': (format_binary, None), # Octet List (Novell)
- '2.16.840.1.113719.1.1.5.1.14': (format_unicode, None), # Tagged String (Novell)
- '2.16.840.1.113719.1.1.5.1.15': (format_unicode, None), # Tagged Name And String (Novell)
- '2.16.840.1.113719.1.1.5.1.16': (format_binary, None), # NDS Replica Pointer (Novell)
- '2.16.840.1.113719.1.1.5.1.17': (format_unicode, None), # NDS ACL (Novell)
- '2.16.840.1.113719.1.1.5.1.19': (format_time, validate_time), # NDS Timestamp (Novell)
- '2.16.840.1.113719.1.1.5.1.22': (format_integer, validate_integer), # Counter (Novell)
- '2.16.840.1.113719.1.1.5.1.23': (format_unicode, None), # Tagged Name (Novell)
- '2.16.840.1.113719.1.1.5.1.25': (format_unicode, None), # Typed Name (Novell)
- 'supportedldapversion': (format_integer, None), # supportedLdapVersion (Microsoft)
- 'octetstring': (format_binary, validate_uuid_le), # octect string (Microsoft)
- '1.2.840.113556.1.4.2': (format_uuid_le, None), # object guid (Microsoft)
- '1.2.840.113556.1.4.13': (format_ad_timestamp, validate_ad_timestamp), # builtinCreationTime (Microsoft)
- '1.2.840.113556.1.4.26': (format_ad_timestamp, validate_ad_timestamp), # creationTime (Microsoft)
- '1.2.840.113556.1.4.49': (format_ad_timestamp, validate_ad_timestamp), # badPasswordTime (Microsoft)
- '1.2.840.113556.1.4.51': (format_ad_timestamp, validate_ad_timestamp), # lastLogoff (Microsoft)
- '1.2.840.113556.1.4.52': (format_ad_timestamp, validate_ad_timestamp), # lastLogon (Microsoft)
- '1.2.840.113556.1.4.96': (format_ad_timestamp, validate_minus_one), # pwdLastSet (Microsoft, can be set to -1 only)
- '1.2.840.113556.1.4.146': (format_sid, None), # objectSid (Microsoft)
- '1.2.840.113556.1.4.159': (format_ad_timestamp, validate_ad_timestamp), # accountExpires (Microsoft)
- '1.2.840.113556.1.4.662': (format_ad_timestamp, validate_ad_timestamp), # lockoutTime (Microsoft)
- '1.2.840.113556.1.4.1696': (format_ad_timestamp, validate_ad_timestamp) # lastLogonTimestamp (Microsoft)
-}
-
-
-def find_attribute_helpers(attr_type, name, custom_formatter):
- """
- Tries to format following the OIDs info and format_helper specification.
- Search for attribute oid, then attribute name (can be multiple), then attribute syntax
- Precedence is:
- 1. attribute name
- 2. attribute oid(from schema)
- 3. attribute names (from oid_info)
- 4. attribute syntax (from schema)
- Custom formatters can be defined in Server object and have precedence over the standard_formatters
- If no formatter is found the raw_value is returned as bytes.
- Attributes defined as SINGLE_VALUE in schema are returned as a single object, otherwise are returned as a list of object
- Formatter functions can return any kind of object
- return a tuple (formatter, validator)
- """
- formatter = None
- if custom_formatter and isinstance(custom_formatter, dict): # if custom formatters are defined they have precedence over the standard formatters
- if name in custom_formatter: # search for attribute name, as returned by the search operation
- formatter = custom_formatter[name]
-
- if not formatter and attr_type and attr_type.oid in custom_formatter: # search for attribute oid as returned by schema
- formatter = custom_formatter[attr_type.oid]
- if not formatter and attr_type and attr_type.oid_info:
- if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES): # search for multiple names defined in oid_info
- for attr_name in attr_type.oid_info[2]:
- if attr_name in custom_formatter:
- formatter = custom_formatter[attr_name]
- break
- elif attr_type.oid_info[2] in custom_formatter: # search for name defined in oid_info
- formatter = custom_formatter[attr_type.oid_info[2]]
-
- if not formatter and attr_type and attr_type.syntax in custom_formatter: # search for syntax defined in schema
- formatter = custom_formatter[attr_type.syntax]
-
- if not formatter and name in standard_formatter: # search for attribute name, as returned by the search operation
- formatter = standard_formatter[name]
-
- if not formatter and attr_type and attr_type.oid in standard_formatter: # search for attribute oid as returned by schema
- formatter = standard_formatter[attr_type.oid]
-
- if not formatter and attr_type and attr_type.oid_info:
- if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES): # search for multiple names defined in oid_info
- for attr_name in attr_type.oid_info[2]:
- if attr_name in standard_formatter:
- formatter = standard_formatter[attr_name]
- break
- elif attr_type.oid_info[2] in standard_formatter: # search for name defined in oid_info
- formatter = standard_formatter[attr_type.oid_info[2]]
- if not formatter and attr_type and attr_type.syntax in standard_formatter: # search for syntax defined in schema
- formatter = standard_formatter[attr_type.syntax]
-
- if formatter is None:
- return None, None
-
- return formatter
-
-
-def format_attribute_values(schema, name, values, custom_formatter):
- if not values: # RFCs states that attributes must always have values, but a flaky server returns empty values too
- return []
-
- if schema and schema.attribute_types and name in schema.attribute_types:
- attr_type = schema.attribute_types[name]
- else:
- attr_type = None
-
- attribute_helpers = find_attribute_helpers(attr_type, name, custom_formatter)
- if not isinstance(attribute_helpers, tuple): # custom formatter
- formatter = attribute_helpers
- else:
- formatter = format_unicode if not attribute_helpers[0] else attribute_helpers[0]
-
- formatted_values = [formatter(raw_value) for raw_value in values] # executes formatter
- if formatted_values:
- return formatted_values[0] if (attr_type and attr_type.single_value) else formatted_values
- else: # RFCs states that attributes must always have values, but AD return empty values in DirSync
- return []
-
-
-def find_attribute_validator(schema, name, custom_validator):
- if schema and schema.attribute_types and name in schema.attribute_types:
- attr_type = schema.attribute_types[name]
- else:
- attr_type = None
-
- attribute_helpers = find_attribute_helpers(attr_type, name, custom_validator)
- if not isinstance(attribute_helpers, tuple): # custom validator
- validator = attribute_helpers
- else:
- if not attribute_helpers[1]:
- if attr_type and attr_type.single_value:
- validator = validate_generic_single_value # validate only single value
- else:
- validator = always_valid # unknown syntax, accepts single and multi value
- else:
- validator = attribute_helpers[1]
- return validator
+"""
+"""
+
+# Created on 2014.10.28
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from ... import SEQUENCE_TYPES
+from .formatters import format_ad_timestamp, format_binary, format_boolean,\
+ format_integer, format_sid, format_time, format_unicode, format_uuid, format_uuid_le, format_time_with_0_year,\
+ format_ad_timedelta
+from .validators import validate_integer, validate_time, always_valid,\
+ validate_generic_single_value, validate_boolean, validate_ad_timestamp, validate_sid,\
+ validate_uuid_le, validate_uuid, validate_zero_and_minus_one_and_positive_int, validate_guid, validate_time_with_0_year,\
+ validate_ad_timedelta
+
+# for each syntax a format function and an input validation function can be specified
+
+standard_formatter = {
+ '1.2.840.113556.1.4.903': (format_binary, None), # Object (DN-binary) - Microsoft
+ '1.2.840.113556.1.4.904': (format_unicode, None), # Object (DN-string) - Microsoft
+ '1.2.840.113556.1.4.905': (format_unicode, None), # String (Teletex) - Microsoft
+ '1.2.840.113556.1.4.906': (format_integer, validate_integer), # Large integer - Microsoft
+ '1.2.840.113556.1.4.907': (format_binary, None), # String (NT-sec-desc) - Microsoft
+ '1.2.840.113556.1.4.1221': (format_binary, None), # Object (OR-name) - Microsoft
+ '1.2.840.113556.1.4.1362': (format_unicode, None), # String (Case) - Microsoft
+ '1.3.6.1.4.1.1466.115.121.1.1': (format_binary, None), # ACI item [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.2': (format_binary, None), # Access point [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.3': (format_unicode, None), # Attribute type description
+ '1.3.6.1.4.1.1466.115.121.1.4': (format_binary, None), # Audio [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.5': (format_binary, None), # Binary [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.6': (format_unicode, None), # Bit String
+ '1.3.6.1.4.1.1466.115.121.1.7': (format_boolean, validate_boolean), # Boolean
+ '1.3.6.1.4.1.1466.115.121.1.8': (format_binary, None), # Certificate [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.9': (format_binary, None), # Certificate List [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.10': (format_binary, None), # Certificate Pair [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.11': (format_unicode, None), # Country String
+ '1.3.6.1.4.1.1466.115.121.1.12': (format_unicode, None), # Distinguished name (DN)
+ '1.3.6.1.4.1.1466.115.121.1.13': (format_binary, None), # Data Quality Syntax [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.14': (format_unicode, None), # Delivery method
+ '1.3.6.1.4.1.1466.115.121.1.15': (format_unicode, None), # Directory string
+ '1.3.6.1.4.1.1466.115.121.1.16': (format_unicode, None), # DIT Content Rule Description
+ '1.3.6.1.4.1.1466.115.121.1.17': (format_unicode, None), # DIT Structure Rule Description
+ '1.3.6.1.4.1.1466.115.121.1.18': (format_binary, None), # DL Submit Permission [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.19': (format_binary, None), # DSA Quality Syntax [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.20': (format_binary, None), # DSE Type [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.21': (format_binary, None), # Enhanced Guide
+ '1.3.6.1.4.1.1466.115.121.1.22': (format_unicode, None), # Facsimile Telephone Number
+ '1.3.6.1.4.1.1466.115.121.1.23': (format_binary, None), # Fax
+ '1.3.6.1.4.1.1466.115.121.1.24': (format_time, validate_time), # Generalized time
+ '1.3.6.1.4.1.1466.115.121.1.25': (format_binary, None), # Guide [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.26': (format_unicode, None), # IA5 string
+ '1.3.6.1.4.1.1466.115.121.1.27': (format_integer, validate_integer), # Integer
+ '1.3.6.1.4.1.1466.115.121.1.28': (format_binary, None), # JPEG
+ '1.3.6.1.4.1.1466.115.121.1.29': (format_binary, None), # Master and Shadow Access Points [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.30': (format_unicode, None), # Matching rule description
+ '1.3.6.1.4.1.1466.115.121.1.31': (format_unicode, None), # Matching rule use description
+ '1.3.6.1.4.1.1466.115.121.1.32': (format_unicode, None), # Mail Preference [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.33': (format_unicode, None), # MHS OR Address [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.34': (format_unicode, None), # Name and optional UID
+ '1.3.6.1.4.1.1466.115.121.1.35': (format_unicode, None), # Name form description
+ '1.3.6.1.4.1.1466.115.121.1.36': (format_unicode, None), # Numeric string
+ '1.3.6.1.4.1.1466.115.121.1.37': (format_unicode, None), # Object class description
+ '1.3.6.1.4.1.1466.115.121.1.38': (format_unicode, None), # OID
+ '1.3.6.1.4.1.1466.115.121.1.39': (format_unicode, None), # Other mailbox
+ '1.3.6.1.4.1.1466.115.121.1.40': (format_binary, None), # Octet string
+ '1.3.6.1.4.1.1466.115.121.1.41': (format_unicode, None), # Postal address
+ '1.3.6.1.4.1.1466.115.121.1.42': (format_binary, None), # Protocol Information [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.43': (format_binary, None), # Presentation Address [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.44': (format_unicode, None), # Printable string
+ '1.3.6.1.4.1.1466.115.121.1.45': (format_binary, None), # Subtree specification [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.46': (format_binary, None), # Supplier Information [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.47': (format_binary, None), # Supplier Or Consumer [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.48': (format_binary, None), # Supplier And Consumer [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.49': (format_binary, None), # Supported Algorithm [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.50': (format_unicode, None), # Telephone number
+ '1.3.6.1.4.1.1466.115.121.1.51': (format_unicode, None), # Teletex terminal identifier
+ '1.3.6.1.4.1.1466.115.121.1.52': (format_unicode, None), # Teletex number
+ '1.3.6.1.4.1.1466.115.121.1.53': (format_time, validate_time), # Utc time (deprecated)
+ '1.3.6.1.4.1.1466.115.121.1.54': (format_unicode, None), # LDAP syntax description
+ '1.3.6.1.4.1.1466.115.121.1.55': (format_binary, None), # Modify rights [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.56': (format_binary, None), # LDAP Schema Definition [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.57': (format_unicode, None), # LDAP Schema Description [OBSOLETE]
+ '1.3.6.1.4.1.1466.115.121.1.58': (format_unicode, None), # Substring assertion
+ '1.3.6.1.1.16.1': (format_uuid, validate_uuid), # UUID
+ '1.3.6.1.1.16.4': (format_uuid, validate_uuid), # entryUUID (RFC 4530)
+ '2.16.840.1.113719.1.1.4.1.501': (format_uuid, validate_guid), # GUID (Novell)
+ '2.16.840.1.113719.1.1.5.1.0': (format_binary, None), # Unknown (Novell)
+ '2.16.840.1.113719.1.1.5.1.6': (format_unicode, None), # Case Ignore List (Novell)
+ '2.16.840.1.113719.1.1.5.1.12': (format_binary, None), # Tagged Data (Novell)
+ '2.16.840.1.113719.1.1.5.1.13': (format_binary, None), # Octet List (Novell)
+ '2.16.840.1.113719.1.1.5.1.14': (format_unicode, None), # Tagged String (Novell)
+ '2.16.840.1.113719.1.1.5.1.15': (format_unicode, None), # Tagged Name And String (Novell)
+ '2.16.840.1.113719.1.1.5.1.16': (format_binary, None), # NDS Replica Pointer (Novell)
+ '2.16.840.1.113719.1.1.5.1.17': (format_unicode, None), # NDS ACL (Novell)
+ '2.16.840.1.113719.1.1.5.1.19': (format_time, validate_time), # NDS Timestamp (Novell)
+ '2.16.840.1.113719.1.1.5.1.22': (format_integer, validate_integer), # Counter (Novell)
+ '2.16.840.1.113719.1.1.5.1.23': (format_unicode, None), # Tagged Name (Novell)
+ '2.16.840.1.113719.1.1.5.1.25': (format_unicode, None), # Typed Name (Novell)
+ 'supportedldapversion': (format_integer, None), # supportedLdapVersion (Microsoft)
+ 'octetstring': (format_binary, validate_uuid_le), # octet string (Microsoft)
+ '1.2.840.113556.1.4.2': (format_uuid_le, validate_uuid_le), # objectGUID (Microsoft)
+ '1.2.840.113556.1.4.13': (format_ad_timestamp, validate_ad_timestamp), # builtinCreationTime (Microsoft)
+ '1.2.840.113556.1.4.26': (format_ad_timestamp, validate_ad_timestamp), # creationTime (Microsoft)
+ '1.2.840.113556.1.4.49': (format_ad_timestamp, validate_ad_timestamp), # badPasswordTime (Microsoft)
+ '1.2.840.113556.1.4.51': (format_ad_timestamp, validate_ad_timestamp), # lastLogoff (Microsoft)
+ '1.2.840.113556.1.4.52': (format_ad_timestamp, validate_ad_timestamp), # lastLogon (Microsoft)
+ '1.2.840.113556.1.4.60': (format_ad_timedelta, validate_ad_timedelta), # lockoutDuration (Microsoft)
+ '1.2.840.113556.1.4.61': (format_ad_timedelta, validate_ad_timedelta), # lockOutObservationWindow (Microsoft)
+ '1.2.840.113556.1.4.74': (format_ad_timedelta, validate_ad_timedelta), # maxPwdAge (Microsoft)
+ '1.2.840.113556.1.4.78': (format_ad_timedelta, validate_ad_timedelta), # minPwdAge (Microsoft)
+ '1.2.840.113556.1.4.96': (format_ad_timestamp, validate_zero_and_minus_one_and_positive_int), # pwdLastSet (Microsoft, can only be set to 0, -1 or a positive value)
+ '1.2.840.113556.1.4.146': (format_sid, validate_sid), # objectSid (Microsoft)
+ '1.2.840.113556.1.4.159': (format_ad_timestamp, validate_ad_timestamp), # accountExpires (Microsoft)
+ '1.2.840.113556.1.4.662': (format_ad_timestamp, validate_ad_timestamp), # lockoutTime (Microsoft)
+ '1.2.840.113556.1.4.1696': (format_ad_timestamp, validate_ad_timestamp), # lastLogonTimestamp (Microsoft)
+ '1.3.6.1.4.1.42.2.27.8.1.17': (format_time_with_0_year, validate_time_with_0_year) # pwdAccountLockedTime (Novell)
+}
+
+
+def find_attribute_helpers(attr_type, name, custom_formatter):
+ """
+ Tries to format following the OIDs info and format_helper specification.
+ Search for attribute oid, then attribute name (can be multiple), then attribute syntax
+ Precedence is:
+ 1. attribute name
+ 2. attribute oid(from schema)
+ 3. attribute names (from oid_info)
+ 4. attribute syntax (from schema)
+ Custom formatters can be defined in the Server object and have precedence over the standard_formatters.
+ If no formatter is found the raw_value is returned as bytes.
+ Attributes defined as SINGLE_VALUE in the schema are returned as a single object, otherwise they are returned as a list of objects.
+ Formatter functions can return any kind of object.
+ Returns a tuple (formatter, validator).
+ """
+ formatter = None
+ if custom_formatter and isinstance(custom_formatter, dict): # if custom formatters are defined they have precedence over the standard formatters
+ if name in custom_formatter: # search for attribute name, as returned by the search operation
+ formatter = custom_formatter[name]
+
+ if not formatter and attr_type and attr_type.oid in custom_formatter: # search for attribute oid as returned by schema
+ formatter = custom_formatter[attr_type.oid]
+ if not formatter and attr_type and attr_type.oid_info:
+ if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES): # search for multiple names defined in oid_info
+ for attr_name in attr_type.oid_info[2]:
+ if attr_name in custom_formatter:
+ formatter = custom_formatter[attr_name]
+ break
+ elif attr_type.oid_info[2] in custom_formatter: # search for name defined in oid_info
+ formatter = custom_formatter[attr_type.oid_info[2]]
+
+ if not formatter and attr_type and attr_type.syntax in custom_formatter: # search for syntax defined in schema
+ formatter = custom_formatter[attr_type.syntax]
+
+ if not formatter and name in standard_formatter: # search for attribute name, as returned by the search operation
+ formatter = standard_formatter[name]
+
+ if not formatter and attr_type and attr_type.oid in standard_formatter: # search for attribute oid as returned by schema
+ formatter = standard_formatter[attr_type.oid]
+
+ if not formatter and attr_type and attr_type.oid_info:
+ if isinstance(attr_type.oid_info[2], SEQUENCE_TYPES): # search for multiple names defined in oid_info
+ for attr_name in attr_type.oid_info[2]:
+ if attr_name in standard_formatter:
+ formatter = standard_formatter[attr_name]
+ break
+ elif attr_type.oid_info[2] in standard_formatter: # search for name defined in oid_info
+ formatter = standard_formatter[attr_type.oid_info[2]]
+ if not formatter and attr_type and attr_type.syntax in standard_formatter: # search for syntax defined in schema
+ formatter = standard_formatter[attr_type.syntax]
+
+ if formatter is None:
+ return None, None
+
+ return formatter
+
+
+def format_attribute_values(schema, name, values, custom_formatter):
+ if not values: # RFCs state that attributes must always have values, but a flaky server returns empty values too
+ return []
+
+ if not isinstance(values, SEQUENCE_TYPES):
+ values = [values]
+
+ if schema and schema.attribute_types and name in schema.attribute_types:
+ attr_type = schema.attribute_types[name]
+ else:
+ attr_type = None
+
+ attribute_helpers = find_attribute_helpers(attr_type, name, custom_formatter)
+ if not isinstance(attribute_helpers, tuple): # custom formatter
+ formatter = attribute_helpers
+ else:
+ formatter = format_unicode if not attribute_helpers[0] else attribute_helpers[0]
+
+ formatted_values = [formatter(raw_value) for raw_value in values] # executes formatter
+ if formatted_values:
+ return formatted_values[0] if (attr_type and attr_type.single_value) else formatted_values
+ else: # RFCs state that attributes must always have values, but AD returns empty values in DirSync
+ return []
+
+
+def find_attribute_validator(schema, name, custom_validator):
+ if schema and schema.attribute_types and name in schema.attribute_types:
+ attr_type = schema.attribute_types[name]
+ else:
+ attr_type = None
+
+ attribute_helpers = find_attribute_helpers(attr_type, name, custom_validator)
+ if not isinstance(attribute_helpers, tuple): # custom validator
+ validator = attribute_helpers
+ else:
+ if not attribute_helpers[1]:
+ if attr_type and attr_type.single_value:
+ validator = validate_generic_single_value # validate only single value
+ else:
+ validator = always_valid # unknown syntax, accepts single and multi value
+ else:
+ validator = attribute_helpers[1]
+ return validator
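
The precedence described in find_attribute_helpers can be exercised through the Server object's formatter argument; a minimal sketch, where the server name, attribute key and callable are illustrative only:

# Minimal sketch: a custom formatter keyed by attribute name is consulted
# before the standard_formatter table when entry attribute values are built.
from ldap3 import Server, ALL

def uppercase_cn(raw_value):
    return raw_value.decode('utf-8').upper()

server = Server('ldap.example.com', get_info=ALL,
                formatter={'cn': uppercase_cn})   # the key may also be an attribute OID or a syntax OID
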
diff --git a/ldap3/protocol/formatters/validators.py b/ldap3/protocol/formatters/validators.py
index a332c1a..3ab300d 100644
--- a/ldap3/protocol/formatters/validators.py
+++ b/ldap3/protocol/formatters/validators.py
@@ -1,282 +1,503 @@
-"""
-"""
-
-# Created on 2016.08.09
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2016 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from datetime import datetime
-from calendar import timegm
-from uuid import UUID
-
-from ... import SEQUENCE_TYPES, STRING_TYPES
-from .formatters import format_time, format_ad_timestamp
-from ...utils.conv import to_raw, to_unicode
-
-# Validators return True if value is valid, False if value is not valid,
-# or a value different from True and False that is a valid value to substitute to the input value
-
-
-def check_type(input_value, value_type):
- if isinstance(input_value, value_type):
- return True
-
- if isinstance(input_value, SEQUENCE_TYPES):
- for value in input_value:
- if not isinstance(value, value_type):
- return False
- return True
-
- return False
-
-
-def always_valid(input_value):
- return True
-
-
-def validate_generic_single_value(input_value):
- if not isinstance(input_value, SEQUENCE_TYPES):
- return True
-
- try: # object couldn't have a __len__ method
- if len(input_value) == 1:
- return True
- except Exception:
- pass
-
- return False
-
-
-def validate_minus_one(input_value):
- """Accept -1 only (used by pwdLastSet in AD)
- """
- if not isinstance(input_value, SEQUENCE_TYPES):
- if input_value == -1 or input_value == '-1':
- return True
-
- try: # object couldn't have a __len__ method
- if len(input_value) == 1 and input_value == -1 or input_value == '-1':
- return True
- except Exception:
- pass
-
- return False
-
-
-def validate_integer(input_value):
- if check_type(input_value, (float, bool)):
- return False
-
- if str is bytes: # Python 2, check for long too
- if check_type(input_value, (int, long)):
- return True
- else: # Python 3, int only
- if check_type(input_value, int):
- return True
-
- sequence = True # indicates if a sequence must be returned
- if not isinstance(input_value, SEQUENCE_TYPES):
- sequence = False
- input_value = [input_value]
- else:
- sequence = True # indicates if a sequence must be returned
-
- valid_values = [] # builds a list of valid int values
- from decimal import Decimal, InvalidOperation
- for element in input_value:
- try: # try to convert any type to int, an invalid conversion raise TypeError or ValueError, doublecheck with Decimal type, if both are valid and equal then then int() value is used
- value = to_unicode(element) if isinstance(element, bytes) else element
- decimal_value = Decimal(value)
- int_value = int(value)
- if decimal_value == int_value:
- valid_values.append(int_value)
- else:
- return False
- except (ValueError, TypeError, InvalidOperation):
- return False
-
- if sequence:
- return valid_values
- else:
- return valid_values[0]
-
-
-def validate_bytes(input_value):
- return check_type(input_value, bytes)
-
-
-def validate_boolean(input_value):
- # it could be a real bool or the string TRUE or FALSE, # only a single valued is allowed
- if validate_generic_single_value(input_value): # valid only if a single value or a sequence with a single element
- if isinstance(input_value, SEQUENCE_TYPES):
- input_value = input_value[0]
- if isinstance(input_value, bool):
- if input_value:
- return 'TRUE'
- else:
- return 'FALSE'
- if isinstance(input_value, STRING_TYPES):
- if input_value.lower() == 'true':
- return 'TRUE'
- elif input_value.lower() == 'false':
- return 'FALSE'
-
- return False
-
-
-def validate_time(input_value):
- # if datetime object doesn't have a timezone it's considered local time and is adjusted to UTC
- if not isinstance(input_value, SEQUENCE_TYPES):
- sequence = False
- input_value = [input_value]
- else:
- sequence = True # indicates if a sequence must be returned
-
- valid_values = []
- changed = False
- for element in input_value:
- if isinstance(element, STRING_TYPES): # tries to check if it is already be a Generalized Time
- if isinstance(format_time(to_raw(element)), datetime): # valid Generalized Time string
- valid_values.append(element)
- else:
- return False
- elif isinstance(element, datetime):
- changed = True
- if element.tzinfo: # a datetime with a timezone
- valid_values.append(element.strftime('%Y%m%d%H%M%S%z'))
- else: # datetime without timezone, assumed local and adjusted to UTC
- offset = datetime.now() - datetime.utcnow()
- valid_values.append((element - offset).strftime('%Y%m%d%H%M%SZ'))
- else:
- return False
-
- if changed:
- if sequence:
- return valid_values
- else:
- return valid_values[0]
- else:
- return True
-
-
-def validate_ad_timestamp(input_value):
- """
- Active Directory stores date/time values as the number of 100-nanosecond intervals
- that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
- The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
- """
- if not isinstance(input_value, SEQUENCE_TYPES):
- sequence = False
- input_value = [input_value]
- else:
- sequence = True # indicates if a sequence must be returned
-
- valid_values = []
- changed = False
- for element in input_value:
- if isinstance(element, STRING_TYPES): # tries to check if it is already be a AD timestamp
- if isinstance(format_ad_timestamp(to_raw(element)), datetime): # valid Generalized Time string
- valid_values.append(element)
- else:
- return False
- elif isinstance(element, datetime):
- changed = True
- if element.tzinfo: # a datetime with a timezone
- valid_values.append(to_raw((timegm((element).utctimetuple()) + 11644473600) * 10000000, encoding='ascii'))
- else: # datetime without timezone, assumed local and adjusted to UTC
- offset = datetime.now() - datetime.utcnow()
- valid_values.append(to_raw((timegm((element - offset).timetuple()) + 11644473600) * 10000000, encoding='ascii'))
- else:
- return False
-
- if changed:
- if sequence:
- return valid_values
- else:
- return valid_values[0]
- else:
- return True
-
-
-def validate_uuid(input_value):
- """
- object guid in uuid format
- """
- if not isinstance(input_value, SEQUENCE_TYPES):
- sequence = False
- input_value = [input_value]
- else:
- sequence = True # indicates if a sequence must be returned
-
- valid_values = []
- changed = False
- for element in input_value:
- if isinstance(element, (bytes, bytearray)): # assumes bytes are valid
- valid_values.append(element)
- elif isinstance(element, STRING_TYPES):
- try:
- valid_values.append(UUID(element).bytes)
- changed = True
- except ValueError:
- return False
- else:
- return False
-
- if changed:
- if sequence:
- return valid_values
- else:
- return valid_values[0]
- else:
- return True
-
-
-def validate_uuid_le(input_value):
- """
- Active Directory stores objectGUID in uuid_le format
- """
- if not isinstance(input_value, SEQUENCE_TYPES):
- sequence = False
- input_value = [input_value]
- else:
- sequence = True # indicates if a sequence must be returned
-
- valid_values = []
- changed = False
- for element in input_value:
- if isinstance(element, (bytes, bytearray)): # assumes bytes are valid
- valid_values.append(element)
- elif isinstance(element, STRING_TYPES):
- try:
- valid_values.append(UUID(element).bytes_le)
- changed = True
- except ValueError:
- return False
- else:
- return False
-
- if changed:
- if sequence:
- return valid_values
- else:
- return valid_values[0]
- else:
- return True
+"""
+"""
+
+# Created on 2016.08.09
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2016 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+from binascii import a2b_hex, hexlify
+from datetime import datetime
+from calendar import timegm
+from uuid import UUID
+from struct import pack
+
+
+from ... import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, INTEGER_TYPES
+from .formatters import format_time, format_ad_timestamp
+from ...utils.conv import to_raw, to_unicode, ldap_escape_to_bytes, escape_bytes
+
+# Validators return True if value is valid, False if value is not valid,
+# or a value different from True and False that is a valid value to substitute to the input value
+
+
+def check_backslash(value):
+ if isinstance(value, (bytearray, bytes)):
+ if b'\\' in value:
+ value = value.replace(b'\\', b'\\5C')
+ elif isinstance(value, STRING_TYPES):
+ if '\\' in value:
+ value = value.replace('\\', '\\5C')
+ return value
+
+
+def check_type(input_value, value_type):
+ if isinstance(input_value, value_type):
+ return True
+
+ if isinstance(input_value, SEQUENCE_TYPES):
+ for value in input_value:
+ if not isinstance(value, value_type):
+ return False
+ return True
+
+ return False
+
+
+# noinspection PyUnusedLocal
+def always_valid(input_value):
+ return True
+
+
+def validate_generic_single_value(input_value):
+ if not isinstance(input_value, SEQUENCE_TYPES):
+ return True
+
+ try: # the object might not have a __len__ method
+ if len(input_value) == 1:
+ return True
+ except Exception:
+ pass
+
+ return False
+
+
+def validate_zero_and_minus_one_and_positive_int(input_value):
+ """Accept -1 and 0 only (used by pwdLastSet in AD)
+ """
+ if not isinstance(input_value, SEQUENCE_TYPES):
+ if isinstance(input_value, NUMERIC_TYPES) or isinstance(input_value, STRING_TYPES):
+ return True if int(input_value) >= -1 else False
+ return False
+ else:
+ if len(input_value) == 1 and (isinstance(input_value[0], NUMERIC_TYPES) or isinstance(input_value[0], STRING_TYPES)):
+ return True if int(input_value[0]) >= -1 else False
+
+ return False
+
+
+def validate_integer(input_value):
+ if check_type(input_value, (float, bool)):
+ return False
+ if check_type(input_value, INTEGER_TYPES):
+ return True
+
+ if not isinstance(input_value, SEQUENCE_TYPES):
+ sequence = False
+ input_value = [input_value]
+ else:
+ sequence = True # indicates if a sequence must be returned
+
+ valid_values = [] # builds a list of valid int values
+ from decimal import Decimal, InvalidOperation
+ for element in input_value:
+ try: # try to convert any type to int; an invalid conversion raises TypeError or ValueError. Double-check with Decimal: if both are valid and equal then the int() value is used
+ value = to_unicode(element) if isinstance(element, bytes) else element
+ decimal_value = Decimal(value)
+ int_value = int(value)
+ if decimal_value == int_value:
+ valid_values.append(int_value)
+ else:
+ return False
+ except (ValueError, TypeError, InvalidOperation):
+ return False
+
+ if sequence:
+ return valid_values
+ else:
+ return valid_values[0]
+
+
+def validate_bytes(input_value):
+ return check_type(input_value, bytes)
+
+
+def validate_boolean(input_value):
+ # it can be a real bool or the string TRUE or FALSE; only a single value is allowed
+ if validate_generic_single_value(input_value): # valid only if a single value or a sequence with a single element
+ if isinstance(input_value, SEQUENCE_TYPES):
+ input_value = input_value[0]
+ if isinstance(input_value, bool):
+ if input_value:
+ return 'TRUE'
+ else:
+ return 'FALSE'
+ if str is not bytes and isinstance(input_value, bytes): # Python 3: try to convert bytes to string
+ input_value = to_unicode(input_value)
+ if isinstance(input_value, STRING_TYPES):
+ if input_value.lower() == 'true':
+ return 'TRUE'
+ elif input_value.lower() == 'false':
+ return 'FALSE'
+ return False
+
+
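A quick illustration (not in the commit) of the normalisation performed by validate_boolean above:

    from ldap3.protocol.formatters.validators import validate_boolean

    assert validate_boolean(True) == 'TRUE'
    assert validate_boolean('false') == 'FALSE'
    assert validate_boolean([True, False]) is False  # multi-valued input is rejected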
+def validate_time_with_0_year(input_value):
+ # validates Generalized Time but also accepts a 0000 year
+ # if datetime object doesn't have a timezone it's considered local time and is adjusted to UTC
+ if not isinstance(input_value, SEQUENCE_TYPES):
+ sequence = False
+ input_value = [input_value]
+ else:
+ sequence = True # indicates if a sequence must be returned
+
+ valid_values = []
+ changed = False
+ for element in input_value:
+ if str is not bytes and isinstance(element, bytes): # Python 3: try to convert bytes to string
+ element = to_unicode(element)
+ if isinstance(element, STRING_TYPES): # checks whether it is already a Generalized Time
+ if element.startswith('0000') or isinstance(format_time(to_raw(element)), datetime): # valid Generalized Time string
+ valid_values.append(element)
+ else:
+ return False
+ elif isinstance(element, datetime):
+ changed = True
+ if element.tzinfo: # a datetime with a timezone
+ valid_values.append(element.strftime('%Y%m%d%H%M%S%z'))
+ else: # datetime without timezone, assumed local and adjusted to UTC
+ offset = datetime.now() - datetime.utcnow()
+ valid_values.append((element - offset).strftime('%Y%m%d%H%M%SZ'))
+ else:
+ return False
+
+ if changed:
+ if sequence:
+ return valid_values
+ else:
+ return valid_values[0]
+ else:
+ return True
+
+
+def validate_time(input_value):
+ # if datetime object doesn't have a timezone it's considered local time and is adjusted to UTC
+ if not isinstance(input_value, SEQUENCE_TYPES):
+ sequence = False
+ input_value = [input_value]
+ else:
+ sequence = True # indicates if a sequence must be returned
+
+ valid_values = []
+ changed = False
+ for element in input_value:
+ if str is not bytes and isinstance(element, bytes): # Python 3: try to convert bytes to string
+ element = to_unicode(element)
+ if isinstance(element, STRING_TYPES): # checks whether it is already a Generalized Time
+ if isinstance(format_time(to_raw(element)), datetime): # valid Generalized Time string
+ valid_values.append(element)
+ else:
+ return False
+ elif isinstance(element, datetime):
+ changed = True
+ if element.tzinfo: # a datetime with a timezone
+ valid_values.append(element.strftime('%Y%m%d%H%M%S%z'))
+ else: # datetime without timezone, assumed local and adjusted to UTC
+ offset = datetime.now() - datetime.utcnow()
+ valid_values.append((element - offset).strftime('%Y%m%d%H%M%SZ'))
+ else:
+ return False
+
+ if changed:
+ if sequence:
+ return valid_values
+ else:
+ return valid_values[0]
+ else:
+ return True
+
+
+def validate_ad_timestamp(input_value):
+ """
+ Active Directory stores date/time values as the number of 100-nanosecond intervals
+ that have elapsed since the 0 hour on January 1, 1601 till the date/time that is being stored.
+ The time is always stored in Greenwich Mean Time (GMT) in the Active Directory.
+ """
+ if not isinstance(input_value, SEQUENCE_TYPES):
+ sequence = False
+ input_value = [input_value]
+ else:
+ sequence = True # indicates if a sequence must be returned
+
+ valid_values = []
+ changed = False
+ for element in input_value:
+ if str is not bytes and isinstance(element, bytes): # Python 3: try to convert bytes to string
+ element = to_unicode(element)
+ if isinstance(element, NUMERIC_TYPES):
+ if 0 <= element <= 9223372036854775807: # min and max for the AD timestamp starting from 12:00 AM January 1, 1601
+ valid_values.append(element)
+ else:
+ return False
+ elif isinstance(element, STRING_TYPES): # checks whether it is already an AD timestamp
+ if isinstance(format_ad_timestamp(to_raw(element)), datetime): # valid AD timestamp string
+ valid_values.append(element)
+ else:
+ return False
+ elif isinstance(element, datetime):
+ changed = True
+ if element.tzinfo: # a datetime with a timezone
+ valid_values.append(to_raw((timegm(element.utctimetuple()) + 11644473600) * 10000000, encoding='ascii'))
+ else: # datetime without timezone, assumed local and adjusted to UTC
+ offset = datetime.now() - datetime.utcnow()
+ valid_values.append(to_raw((timegm((element - offset).timetuple()) + 11644473600) * 10000000, encoding='ascii'))
+ else:
+ return False
+
+ if changed:
+ if sequence:
+ return valid_values
+ else:
+ return valid_values[0]
+ else:
+ return True
+
+
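A short worked example (illustrative only) of the conversion used above: a UTC datetime becomes the number of 100-nanosecond intervals since 1601-01-01 by shifting the Unix epoch by 11644473600 seconds and scaling by 10**7:

    from calendar import timegm
    from datetime import datetime

    dt = datetime(2020, 1, 1)  # assumed to be UTC
    ad_value = (timegm(dt.utctimetuple()) + 11644473600) * 10000000
    print(ad_value)  # 132223104000000000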
+def validate_ad_timedelta(input_value):
+ """
+ Should be validated like an AD timestamp except that since it is a time
+ delta, it is stored as a negative number.
+ """
+ if not isinstance(input_value, INTEGER_TYPES) or input_value > 0:
+ return False
+ return validate_ad_timestamp(input_value * -1)
+
+
+def validate_guid(input_value):
+ """
+ object guid in uuid format (Novell eDirectory)
+ """
+ if not isinstance(input_value, SEQUENCE_TYPES):
+ sequence = False
+ input_value = [input_value]
+ else:
+ sequence = True # indicates if a sequence must be returned
+
+ valid_values = []
+ changed = False
+ for element in input_value:
+ if isinstance(element, STRING_TYPES):
+ try:
+ valid_values.append(UUID(element).bytes)
+ changed = True
+ except ValueError: # check whether the value is an escaped byte sequence
+ try:
+ valid_values.append(UUID(element.replace('\\', '')).bytes)
+ changed = True
+ continue
+ except ValueError:
+ if str is not bytes: # python 3
+ pass
+ else:
+ valid_values.append(element)
+ continue
+ return False
+ elif isinstance(element, (bytes, bytearray)): # assumes bytes are valid
+ valid_values.append(element)
+ else:
+ return False
+
+ if changed:
+ valid_values = [check_backslash(value) for value in valid_values]
+ if sequence:
+ return valid_values
+ else:
+ return valid_values[0]
+ else:
+ return True
+
+
+def validate_uuid(input_value):
+ """
+ object entryUUID in uuid format
+ """
+ if not isinstance(input_value, SEQUENCE_TYPES):
+ sequence = False
+ input_value = [input_value]
+ else:
+ sequence = True # indicates if a sequence must be returned
+
+ valid_values = []
+ changed = False
+ for element in input_value:
+ if isinstance(element, STRING_TYPES):
+ try:
+ valid_values.append(str(UUID(element)))
+ changed = True
+ except ValueError: # check whether the value is an escaped byte sequence
+ try:
+ valid_values.append(str(UUID(element.replace('\\', ''))))
+ changed = True
+ continue
+ except ValueError:
+ if str is not bytes: # python 3
+ pass
+ else:
+ valid_values.append(element)
+ continue
+ return False
+ elif isinstance(element, (bytes, bytearray)): # assumes bytes are valid
+ valid_values.append(element)
+ else:
+ return False
+
+ if changed:
+ valid_values = [check_backslash(value) for value in valid_values]
+ if sequence:
+ return valid_values
+ else:
+ return valid_values[0]
+ else:
+ return True
+
+
+def validate_uuid_le(input_value):
+ """
+ Active Directory stores objectGUID in uuid_le format, follows RFC4122 and MS-DTYP:
+ "{07039e68-4373-264d-a0a7-07039e684373}": string representation big endian, converted to little endian (with or without brace curles)
+ "689e030773434d26a7a007039e684373": packet representation, already in little endian
+ "\68\9e\03\07\73\43\4d\26\a7\a0\07\03\9e\68\43\73": bytes representation, already in little endian
+ byte sequence: already in little endian
+
+ """
+ if not isinstance(input_value, SEQUENCE_TYPES):
+ sequence = False
+ input_value = [input_value]
+ else:
+ sequence = True # indicates if a sequence must be returned
+
+ valid_values = []
+ changed = False
+ for element in input_value:
+ error = False
+ if isinstance(element, STRING_TYPES):
+ if element[0] == '{' and element[-1] == '}':
+ try:
+ valid_values.append(UUID(hex=element).bytes_le) # string representation, value in big endian, converts to little endian
+ changed = True
+ except ValueError:
+ error = True
+ elif '-' in element:
+ try:
+ valid_values.append(UUID(hex=element).bytes_le) # string representation, value in big endian, converts to little endian
+ changed = True
+ except ValueError:
+ error = True
+ elif '\\' in element:
+ try:
+ uuid = UUID(bytes_le=ldap_escape_to_bytes(element)).bytes_le
+ uuid = escape_bytes(uuid)
+ valid_values.append(uuid) # byte representation, value in little endian
+ changed = True
+ except ValueError:
+ error = True
+ elif '-' not in element: # value in little endian
+ try:
+ valid_values.append(UUID(bytes_le=a2b_hex(element)).bytes_le) # packet representation, value in little endian, converts to little endian
+ changed = True
+ except ValueError:
+ error = True
+ if error and str == bytes: # Python 2 only: assume value is bytes and valid
+ valid_values.append(element) # value is untouched, must be in little endian
+ elif isinstance(element, (bytes, bytearray)): # assumes bytes are valid uuid
+ valid_values.append(element) # value is untouched, must be in little endian
+ else:
+ return False
+
+ if changed:
+ valid_values = [check_backslash(value) for value in valid_values]
+ if sequence:
+ return valid_values
+ else:
+ return valid_values[0]
+ else:
+ return True
+
+
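An illustrative check (not part of the commit) that the big-endian string forms listed in the validate_uuid_le docstring above map to the little-endian packed form:

    from uuid import UUID

    guid = '{07039e68-4373-264d-a0a7-07039e684373}'
    assert UUID(hex=guid).bytes_le == UUID(hex='07039e68-4373-264d-a0a7-07039e684373').bytes_le
    print(UUID(hex=guid).bytes_le.hex())  # 689e030773434d26a7a007039e684373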
+def validate_sid(input_value):
+ """
+ SID= "S-1-" IdentifierAuthority 1*SubAuthority
+ IdentifierAuthority= IdentifierAuthorityDec / IdentifierAuthorityHex
+ ; If the identifier authority is < 2^32, the
+ ; identifier authority is represented as a decimal
+ ; number
+ ; If the identifier authority is >= 2^32,
+ ; the identifier authority is represented in
+ ; hexadecimal
+ IdentifierAuthorityDec = 1*10DIGIT
+ ; IdentifierAuthorityDec, top level authority of a
+ ; security identifier is represented as a decimal number
+ IdentifierAuthorityHex = "0x" 12HEXDIG
+ ; IdentifierAuthorityHex, the top-level authority of a
+ ; security identifier is represented as a hexadecimal number
+ SubAuthority= "-" 1*10DIGIT
+ ; Sub-Authority is always represented as a decimal number
+ ; No leading "0" characters are allowed when IdentifierAuthority
+ ; or SubAuthority is represented as a decimal number
+ ; All hexadecimal digits must be output in string format,
+ ; pre-pended by "0x"
+
+ Revision (1 byte): An 8-bit unsigned integer that specifies the revision level of the SID. This value MUST be set to 0x01.
+ SubAuthorityCount (1 byte): An 8-bit unsigned integer that specifies the number of elements in the SubAuthority array. The maximum number of elements allowed is 15.
+ IdentifierAuthority (6 bytes): A SID_IDENTIFIER_AUTHORITY structure that indicates the authority under which the SID was created. It describes the entity that created the SID. The Identifier Authority value {0,0,0,0,0,5} denotes SIDs created by the NT SID authority.
+ SubAuthority (variable): A variable length array of unsigned 32-bit integers that uniquely identifies a principal relative to the IdentifierAuthority. Its length is determined by SubAuthorityCount.
+
+ If you have a SID like S-a-b-c-d-e-f-g-...
+
+ Then the bytes are
+ a (revision)
+ N (number of dashes minus two)
+ bbbbbb (six bytes of "b" treated as a 48-bit number in big-endian format)
+ cccc (four bytes of "c" treated as a 32-bit number in little-endian format)
+ dddd (four bytes of "d" treated as a 32-bit number in little-endian format)
+ eeee (four bytes of "e" treated as a 32-bit number in little-endian format)
+ ffff (four bytes of "f" treated as a 32-bit number in little-endian format)
+
+ """
+ if not isinstance(input_value, SEQUENCE_TYPES):
+ sequence = False
+ input_value = [input_value]
+ else:
+ sequence = True # indicates if a sequence must be returned
+
+ valid_values = []
+ changed = False
+ for element in input_value:
+ if isinstance(element, STRING_TYPES):
+ if element.startswith('S-'):
+ parts = element.split('-')
+ sid_bytes = pack('<q', int(parts[1]))[0:1] # revision number
+ sid_bytes += pack('<q', len(parts[3:]))[0:1] # number of sub authorities
+ if len(parts[2]) <= 10:
+ sid_bytes += pack('>q', int(parts[2]))[2:] # authority (in dec)
+ else:
+ sid_bytes += pack('>q', int(parts[2], 16))[2:] # authority (in hex)
+ for sub_auth in parts[3:]:
+ sid_bytes += pack('<q', int(sub_auth))[0:4] # sub-authorities
+ valid_values.append(sid_bytes)
+ changed = True
+
+ if changed:
+ valid_values = [check_backslash(value) for value in valid_values]
+ if sequence:
+ return valid_values
+ else:
+ return valid_values[0]
+ else:
+ return True
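A worked example (illustrative, using the well-known Administrators SID) of the packing performed by validate_sid above:

    from struct import pack

    parts = 'S-1-5-32-544'.split('-')
    raw = pack('<q', int(parts[1]))[0:1]        # revision: 0x01
    raw += pack('<q', len(parts[3:]))[0:1]      # sub-authority count: 0x02
    raw += pack('>q', int(parts[2]))[2:]        # authority 5 as a 48-bit big-endian number
    for sub_auth in parts[3:]:
        raw += pack('<q', int(sub_auth))[0:4]   # each sub-authority as a 32-bit little-endian number
    print(raw.hex())  # 01020000000000052000000020020000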
diff --git a/ldap3/protocol/microsoft.py b/ldap3/protocol/microsoft.py
index 35054f4..58e5172 100644
--- a/ldap3/protocol/microsoft.py
+++ b/ldap3/protocol/microsoft.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -81,6 +81,14 @@ class DirSyncControlResponseValue(Sequence):
)
+class SdFlags(Sequence):
+ # SDFlagsRequestValue ::= SEQUENCE {
+ # Flags INTEGER
+ # }
+ componentType = NamedTypes(NamedType('Flags', Integer())
+ )
+
+
class ExtendedDN(Sequence):
# A flag value 0 specifies that the GUID and SID values be returned in hexadecimal string
# A flag value of 1 will return the GUID and SID values in standard string format
@@ -123,3 +131,9 @@ def extended_dn_control(criticality=False, hex_format=False):
def show_deleted_control(criticality=False):
return build_control('1.2.840.113556.1.4.417', criticality, value=None)
+
+
+def security_descriptor_control(criticality=False, sdflags=0x0F):
+ sdcontrol = SdFlags()
+ sdcontrol.setComponentByName('Flags', sdflags)
+ return [build_control('1.2.840.113556.1.4.801', criticality, sdcontrol)]
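Example usage of the new control (server name and credentials below are placeholders): request only the DACL portion of ntSecurityDescriptor on a search:

    from ldap3 import Server, Connection
    from ldap3.protocol.microsoft import security_descriptor_control

    conn = Connection(Server('ldaps://dc.example.com'), 'user', 'password', auto_bind=True)
    conn.search('dc=example,dc=com', '(objectClass=user)',
                attributes=['ntSecurityDescriptor'],
                controls=security_descriptor_control(sdflags=0x04))  # 0x04 = DACL_SECURITY_INFORMATION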
diff --git a/ldap3/protocol/novell.py b/ldap3/protocol/novell.py
index 8667e8f..af8aeb3 100644
--- a/ldap3/protocol/novell.py
+++ b/ldap3/protocol/novell.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/oid.py b/ldap3/protocol/oid.py
index 3f83b77..6dfadd3 100644
--- a/ldap3/protocol/oid.py
+++ b/ldap3/protocol/oid.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/persistentSearch.py b/ldap3/protocol/persistentSearch.py
index e13192c..cb79aa2 100644
--- a/ldap3/protocol/persistentSearch.py
+++ b/ldap3/protocol/persistentSearch.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/rfc2696.py b/ldap3/protocol/rfc2696.py
index 49846a5..66b647b 100644
--- a/ldap3/protocol/rfc2696.py
+++ b/ldap3/protocol/rfc2696.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/rfc2849.py b/ldap3/protocol/rfc2849.py
index c4e8122..953be33 100644
--- a/ldap3/protocol/rfc2849.py
+++ b/ldap3/protocol/rfc2849.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -108,7 +108,7 @@ def add_attributes(attributes, all_base64):
# remaining attributes
for attr in attributes:
- if attr != oc_attr:
+ if attr != oc_attr and attr in attributes:
for val in attributes[attr]:
lines.append(_convert_to_ldif(attr, val, all_base64))
@@ -123,18 +123,21 @@ def sort_ldif_lines(lines, sort_order):
def search_response_to_ldif(entries, all_base64, sort_order=None):
lines = []
- for entry in entries:
- if 'dn' in entry:
- lines.append(_convert_to_ldif('dn', entry['dn'], all_base64))
- lines.extend(add_attributes(entry['raw_attributes'], all_base64))
- else:
- raise LDAPLDIFError('unable to convert to LDIF-CONTENT - missing DN')
- if sort_order:
- lines = sort_ldif_lines(lines, sort_order)
- lines.append('')
-
- if lines:
- lines.append('# total number of entries: ' + str(len(entries)))
+ if entries:
+ for entry in entries:
+ if not entry:
+ continue
+ if 'dn' in entry:
+ lines.append(_convert_to_ldif('dn', entry['dn'], all_base64))
+ lines.extend(add_attributes(entry['raw_attributes'], all_base64))
+ else:
+ raise LDAPLDIFError('unable to convert to LDIF-CONTENT - missing DN')
+ if sort_order:
+ lines = sort_ldif_lines(lines, sort_order)
+ lines.append('')
+
+ if lines:
+ lines.append('# total number of entries: ' + str(len(entries)))
return lines
@@ -143,7 +146,9 @@ def add_request_to_ldif(entry, all_base64, sort_order=None):
lines = []
if 'entry' in entry:
lines.append(_convert_to_ldif('dn', entry['entry'], all_base64))
- lines.extend(add_controls(entry['controls'], all_base64))
+ control_lines = add_controls(entry['controls'], all_base64)
+ if control_lines:
+ lines.extend(control_lines)
lines.append('changetype: add')
lines.extend(add_attributes(entry['attributes'], all_base64))
if sort_order:
@@ -159,7 +164,9 @@ def delete_request_to_ldif(entry, all_base64, sort_order=None):
lines = []
if 'entry' in entry:
lines.append(_convert_to_ldif('dn', entry['entry'], all_base64))
- lines.append(add_controls(entry['controls'], all_base64))
+ control_lines = add_controls(entry['controls'], all_base64)
+ if control_lines:
+ lines.extend(control_lines)
lines.append('changetype: delete')
if sort_order:
lines = sort_ldif_lines(lines, sort_order)
@@ -173,7 +180,9 @@ def modify_request_to_ldif(entry, all_base64, sort_order=None):
lines = []
if 'entry' in entry:
lines.append(_convert_to_ldif('dn', entry['entry'], all_base64))
- lines.extend(add_controls(entry['controls'], all_base64))
+ control_lines = add_controls(entry['controls'], all_base64)
+ if control_lines:
+ lines.extend(control_lines)
lines.append('changetype: modify')
if 'changes' in entry:
for change in entry['changes']:
@@ -190,7 +199,9 @@ def modify_dn_request_to_ldif(entry, all_base64, sort_order=None):
lines = []
if 'entry' in entry:
lines.append(_convert_to_ldif('dn', entry['entry'], all_base64))
- lines.extend(add_controls(entry['controls'], all_base64))
+ control_lines = add_controls(entry['controls'], all_base64)
+ if control_lines:
+ lines.extend(control_lines)
lines.append('changetype: modrdn') if 'newSuperior' in entry and entry['newSuperior'] else lines.append('changetype: moddn')
lines.append(_convert_to_ldif('newrdn', entry['newRdn'], all_base64))
lines.append('deleteoldrdn: ' + ('1' if entry['deleteOldRdn'] else '0'))
@@ -262,8 +273,8 @@ def decode_persistent_search_control(change):
decoded['changeType'] = 'modify dn'
else:
raise LDAPExtensionError('unknown Persistent Search changeType ' + str(decoded_control['changeType']))
- decoded['changeNumber'] = decoded_control['changeNumber'] if 'changeNumber' in decoded_control else None
- decoded['previousDN'] = decoded_control['previousDN'] if 'previousDN' in decoded_control else None
+ decoded['changeNumber'] = decoded_control['changeNumber'] if 'changeNumber' in decoded_control and decoded_control['changeNumber'] is not None and decoded_control['changeNumber'].hasValue() else None
+ decoded['previousDN'] = decoded_control['previousDN'] if 'previousDN' in decoded_control and decoded_control['previousDN'] is not None and decoded_control['previousDN'].hasValue() else None
return decoded
return None
diff --git a/ldap3/protocol/rfc3062.py b/ldap3/protocol/rfc3062.py
index e5ed2ff..c720984 100644
--- a/ldap3/protocol/rfc3062.py
+++ b/ldap3/protocol/rfc3062.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/rfc4511.py b/ldap3/protocol/rfc4511.py
index 711d62a..07f67be 100644
--- a/ldap3/protocol/rfc4511.py
+++ b/ldap3/protocol/rfc4511.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/rfc4512.py b/ldap3/protocol/rfc4512.py
index bdb2f0a..407b2d1 100644
--- a/ldap3/protocol/rfc4512.py
+++ b/ldap3/protocol/rfc4512.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -31,7 +31,7 @@ from .oid import CLASS_ABSTRACT, CLASS_STRUCTURAL, CLASS_AUXILIARY, ATTRIBUTE_US
ATTRIBUTE_DIRECTORY_OPERATION, ATTRIBUTE_DISTRIBUTED_OPERATION, ATTRIBUTE_DSA_OPERATION
from .. import SEQUENCE_TYPES, STRING_TYPES, get_config_parameter
from ..utils.conv import escape_bytes, json_hook, check_json_dict, format_json, to_unicode
-from ..utils.ciDict import CaseInsensitiveDict
+from ..utils.ciDict import CaseInsensitiveWithAliasDict
from ..protocol.formatters.standard import format_attribute_values
from .oid import Oids, decode_oids, decode_syntax, oid_to_string
from ..core.exceptions import LDAPSchemaError, LDAPDefinitionError
@@ -68,7 +68,7 @@ def attribute_usage_to_constant(value):
return ATTRIBUTE_DIRECTORY_OPERATION
elif value == 'distributedOperation':
return ATTRIBUTE_DISTRIBUTED_OPERATION
- elif value == 'dsaOperation':
+ elif value == 'dSAOperation':
return ATTRIBUTE_DSA_OPERATION
else:
return 'unknown'
@@ -123,7 +123,7 @@ class BaseServerInfo(object):
raise LDAPDefinitionError('invalid JSON definition')
if conf_case_insensitive_schema:
- attributes = CaseInsensitiveDict()
+ attributes = CaseInsensitiveWithAliasDict()
else:
attributes = dict()
@@ -429,10 +429,10 @@ class BaseObjectInfo(object):
conf_case_insensitive_schema = get_config_parameter('CASE_INSENSITIVE_SCHEMA_NAMES')
conf_ignore_malformed_schema = get_config_parameter('IGNORE_MALFORMED_SCHEMA')
- ret_dict = CaseInsensitiveDict() if conf_case_insensitive_schema else dict()
+ ret_dict = CaseInsensitiveWithAliasDict() if conf_case_insensitive_schema else dict()
if not definitions:
- return CaseInsensitiveDict() if conf_case_insensitive_schema else dict()
+ return ret_dict
for object_definition in definitions:
object_definition = to_unicode(object_definition.strip(), from_server=True)
@@ -523,7 +523,7 @@ class BaseObjectInfo(object):
if not conf_ignore_malformed_schema:
raise LDAPSchemaError('malformed schema definition key:' + key + ' - use get_info=NONE in Server definition')
else:
- return CaseInsensitiveDict() if conf_case_insensitive_schema else dict()
+ return CaseInsensitiveWithAliasDict() if conf_case_insensitive_schema else dict()
object_def.raw_definition = object_definition
if hasattr(object_def, 'syntax') and object_def.syntax and len(object_def.syntax) == 1:
object_def.min_length = None
@@ -538,8 +538,12 @@ class BaseObjectInfo(object):
object_def.syntax[0] = object_def.syntax[0].strip("'")
object_def.syntax = object_def.syntax[0]
if hasattr(object_def, 'name') and object_def.name:
- for name in object_def.name:
- ret_dict[name] = object_def
+ if conf_case_insensitive_schema:
+ ret_dict[object_def.name[0]] = object_def
+ ret_dict.set_alias(object_def.name[0], object_def.name[1:] + [object_def.oid], ignore_duplicates=True)
+ else:
+ for name in object_def.name:
+ ret_dict[name] = object_def
else:
ret_dict[object_def.oid] = object_def
@@ -547,7 +551,7 @@ class BaseObjectInfo(object):
if not conf_ignore_malformed_schema:
raise LDAPSchemaError('malformed schema definition, use get_info=NONE in Server definition')
else:
- return CaseInsensitiveDict() if conf_case_insensitive_schema else dict()
+ return CaseInsensitiveWithAliasDict() if conf_case_insensitive_schema else dict()
return ret_dict
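Illustrative effect of switching to CaseInsensitiveWithAliasDict (hypothetical host, anonymous bind, schema read enabled): the same attribute definition should now be reachable through its primary name, its alternate names and its OID:

    from ldap3 import Server, Connection, SCHEMA

    server = Server('ldap://ldap.example.com', get_info=SCHEMA)
    Connection(server, auto_bind=True)
    assert server.schema.attribute_types['cn'] is server.schema.attribute_types['commonName']
    assert server.schema.attribute_types['cn'] is server.schema.attribute_types['2.5.4.3']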
diff --git a/ldap3/protocol/rfc4527.py b/ldap3/protocol/rfc4527.py
index 874a735..6213daa 100644
--- a/ldap3/protocol/rfc4527.py
+++ b/ldap3/protocol/rfc4527.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/sasl/digestMd5.py b/ldap3/protocol/sasl/digestMd5.py
index c598351..48235d6 100644
--- a/ldap3/protocol/sasl/digestMd5.py
+++ b/ldap3/protocol/sasl/digestMd5.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -66,7 +66,7 @@ def md5_hmac(k, s):
if not isinstance(s, bytes):
s = s.encode()
- return hmac.new(k, s).hexdigest()
+ return hmac.new(k, s, digestmod=hashlib.md5).hexdigest()
def sasl_digest_md5(connection, controls):
diff --git a/ldap3/protocol/sasl/external.py b/ldap3/protocol/sasl/external.py
index 32ebc0a..7bb8bc1 100644
--- a/ldap3/protocol/sasl/external.py
+++ b/ldap3/protocol/sasl/external.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/sasl/kerberos.py b/ldap3/protocol/sasl/kerberos.py
index 5000ebf..07db583 100644
--- a/ldap3/protocol/sasl/kerberos.py
+++ b/ldap3/protocol/sasl/kerberos.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -58,9 +58,14 @@ def sasl_gssapi(connection, controls):
- If omitted or None, the authentication ID is used as the authorization ID
- If a string, the authorization ID to use. Should start with "dn:" or "user:".
+
+ The optional third element is a raw gssapi credentials structure that can be used instead of
+ relying implicitly on the krb5 credential cache.
"""
target_name = None
authz_id = b""
+ raw_creds = None
+ creds = None
if connection.sasl_credentials:
if len(connection.sasl_credentials) >= 1 and connection.sasl_credentials[0]:
if connection.sasl_credentials[0] is True:
@@ -70,9 +75,15 @@ def sasl_gssapi(connection, controls):
target_name = gssapi.Name('ldap@' + connection.sasl_credentials[0], gssapi.NameType.hostbased_service)
if len(connection.sasl_credentials) >= 2 and connection.sasl_credentials[1]:
authz_id = connection.sasl_credentials[1].encode("utf-8")
+ if len(connection.sasl_credentials) >= 3 and connection.sasl_credentials[2]:
+ raw_creds = connection.sasl_credentials[2]
if target_name is None:
target_name = gssapi.Name('ldap@' + connection.server.host, gssapi.NameType.hostbased_service)
- creds = gssapi.Credentials(name=gssapi.Name(connection.user), usage='initiate') if connection.user else None
+
+ if raw_creds is not None:
+ creds = gssapi.Credentials(base=raw_creds, usage='initiate', store=connection.cred_store)
+ else:
+ creds = gssapi.Credentials(name=gssapi.Name(connection.user), usage='initiate', store=connection.cred_store) if connection.user else None
ctx = gssapi.SecurityContext(name=target_name, mech=gssapi.MechType.kerberos, creds=creds)
in_token = None
try:
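A hedged sketch of the new third sasl_credentials element: passing an explicitly acquired gssapi credentials object instead of relying on the default credential cache (principal and host names are placeholders):

    import gssapi
    from ldap3 import Server, Connection, SASL, KERBEROS

    name = gssapi.Name('svc-ldap@EXAMPLE.COM', gssapi.NameType.kerberos_principal)
    raw_creds = gssapi.Credentials(name=name, usage='initiate')
    conn = Connection(Server('ldap://dc.example.com'), authentication=SASL,
                      sasl_mechanism=KERBEROS,
                      sasl_credentials=(None, None, raw_creds))
    # conn.bind() would then perform the GSSAPI bind using raw_creds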
diff --git a/ldap3/protocol/sasl/plain.py b/ldap3/protocol/sasl/plain.py
index 1de2a36..f7f7456 100644
--- a/ldap3/protocol/sasl/plain.py
+++ b/ldap3/protocol/sasl/plain.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/sasl/sasl.py b/ldap3/protocol/sasl/sasl.py
index 375b235..30fe0e9 100644
--- a/ldap3/protocol/sasl/sasl.py
+++ b/ldap3/protocol/sasl/sasl.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/schemas/ad2012R2.py b/ldap3/protocol/schemas/ad2012R2.py
index f583973..1712613 100644
--- a/ldap3/protocol/schemas/ad2012R2.py
+++ b/ldap3/protocol/schemas/ad2012R2.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/schemas/ds389.py b/ldap3/protocol/schemas/ds389.py
index 0ede92f..f0e19dc 100644
--- a/ldap3/protocol/schemas/ds389.py
+++ b/ldap3/protocol/schemas/ds389.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/protocol/schemas/edir888.py b/ldap3/protocol/schemas/edir888.py
index 630d7dc..8243a7e 100644
--- a/ldap3/protocol/schemas/edir888.py
+++ b/ldap3/protocol/schemas/edir888.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -939,12 +939,7 @@ edir_8_8_8_dsa_info = """
"addEntryOps": [
"947"
],
- "altServer": [
- "ldap://192.168.137.102:389/",
- "ldaps://192.168.137.102:636/",
- "ldap://192.168.137.103:389/",
- "ldaps://192.168.137.103:636/"
- ],
+ "altServer": [],
"bindSecurityErrors": [
"3"
],
diff --git a/ldap3/protocol/schemas/edir914.py b/ldap3/protocol/schemas/edir914.py
new file mode 100644
index 0000000..0a1d2e6
--- /dev/null
+++ b/ldap3/protocol/schemas/edir914.py
@@ -0,0 +1,1157 @@
+"""
+"""
+
+# Created on 2019.08.31
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+edir_9_1_4_schema = """
+{
+ "raw": {
+ "attributeTypes": [
+ "( 2.5.4.35 NAME 'userPassword' DESC 'Internal NDS policy forces this to be single-valued' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{128} USAGE directoryOperation )",
+ "( 2.5.18.1 NAME 'createTimestamp' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 2.5.18.2 NAME 'modifyTimestamp' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 2.5.18.10 NAME 'subschemaSubentry' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 USAGE directoryOperation )",
+ "( 2.5.21.9 NAME 'structuralObjectClass' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 2.16.840.1.113719.1.27.4.49 NAME 'subordinateCount' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 2.16.840.1.113719.1.27.4.48 NAME 'entryFlags' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 2.16.840.1.113719.1.27.4.51 NAME 'federationBoundary' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 2.5.21.5 NAME 'attributeTypes' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.3 USAGE directoryOperation )",
+ "( 2.5.21.6 NAME 'objectClasses' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.37 USAGE directoryOperation )",
+ "( 1.3.6.1.1.20 NAME 'entryDN' DESC 'Operational Attribute' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 2.16.840.1.113719.1.1.4.1.2 NAME 'ACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
+ "( 2.5.4.1 NAME 'aliasedObjectName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Aliased Object Name' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.6 NAME 'backLink' SYNTAX 2.16.840.1.113719.1.1.5.1.23 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Back Link' X-NDS_SERVER_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.8 NAME 'binderyProperty' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Property' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.7 NAME 'binderyObjectRestriction' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Object Restriction' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.9 NAME 'binderyType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Bindery Type' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.11 NAME 'cAPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'CA Private Key' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.12 NAME 'cAPublicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'CA Public Key' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.10 NAME 'Cartridge' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.3 NAME ( 'cn' 'commonName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'CN' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.78 NAME 'printerConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Printer Configuration' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.15 NAME 'Convergence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{1} SINGLE-VALUE X-NDS_UPPER_BOUND '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.6 NAME ( 'c' 'countryName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{2} SINGLE-VALUE X-NDS_NAME 'C' X-NDS_LOWER_BOUND '2' X-NDS_UPPER_BOUND '2' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.18 NAME 'defaultQueue' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Default Queue' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.13 NAME ( 'description' 'multiLineDescription' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{1024} X-NDS_NAME 'Description' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '1024' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.64 NAME 'partitionCreationTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Creation Time' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.5.4.23 NAME 'facsimileTelephoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.22{64512} X-NDS_NAME 'Facsimile Telephone Number' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.117 NAME 'highConvergenceSyncInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'High Convergence Sync Interval' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.25 NAME 'groupMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Group Membership' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.26 NAME 'ndsHomeDirectory' SYNTAX 2.16.840.1.113719.1.1.5.1.15{255} SINGLE-VALUE X-NDS_NAME 'Home Directory' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '255' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.27 NAME 'hostDevice' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Host Device' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.28 NAME 'hostResourceName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Host Resource Name' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.29 NAME 'hostServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Host Server' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.30 NAME 'inheritedACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Inherited ACL' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.5.4.7 NAME ( 'l' 'localityname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'L' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.39 NAME 'loginAllowedTimeMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{42} SINGLE-VALUE X-NDS_NAME 'Login Allowed Time Map' X-NDS_LOWER_BOUND '42' X-NDS_UPPER_BOUND '42' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.40 NAME 'loginDisabled' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Login Disabled' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.41 NAME 'loginExpirationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Expiration Time' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.42 NAME 'loginGraceLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Grace Limit' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.43 NAME 'loginGraceRemaining' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Login Grace Remaining' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.44 NAME 'loginIntruderAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.12 SINGLE-VALUE X-NDS_NAME 'Login Intruder Address' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.45 NAME 'loginIntruderAttempts' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Login Intruder Attempts' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.46 NAME 'loginIntruderLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Intruder Limit' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.31 NAME 'intruderAttemptResetInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Intruder Attempt Reset Interval' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.47 NAME 'loginIntruderResetTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Intruder Reset Time' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.48 NAME 'loginMaximumSimultaneous' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Login Maximum Simultaneous' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.49 NAME 'loginScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Login Script' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.50 NAME 'loginTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Login Time' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.31 NAME ( 'member' 'uniqueMember' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Member' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.52 NAME 'Memory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.22 NAME 'eMailAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.14{64512} X-NDS_NAME 'EMail Address' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.55 NAME 'networkAddress' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NAME 'Network Address' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.56 NAME 'networkAddressRestriction' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NAME 'Network Address Restriction' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.57 NAME 'notify' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Notify' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.114 NAME 'Obituary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.5.4.0 NAME 'objectClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 X-NDS_NAME 'Object Class' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.59 NAME 'operator' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Operator' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.11 NAME ( 'ou' 'organizationalUnitName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'OU' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.10 NAME ( 'o' 'organizationname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'O' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.32 NAME 'owner' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Owner' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.63 NAME 'pageDescriptionLanguage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64} X-NDS_NAME 'Page Description Language' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.65 NAME 'passwordsUsed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'Passwords Used' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.66 NAME 'passwordAllowChange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Allow Change' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.67 NAME 'passwordExpirationInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Password Expiration Interval' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.68 NAME 'passwordExpirationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NAME 'Password Expiration Time' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.69 NAME 'passwordMinimumLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Password Minimum Length' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.70 NAME 'passwordRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Required' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.71 NAME 'passwordUniqueRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Password Unique Required' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.72 NAME 'path' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'Path' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.19 NAME 'physicalDeliveryOfficeName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'Physical Delivery Office Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.16 NAME 'postalAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} X-NDS_NAME 'Postal Address' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.17 NAME 'postalCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} X-NDS_NAME 'Postal Code' X-NDS_UPPER_BOUND '40' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.18 NAME 'postOfficeBox' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{40} X-NDS_NAME 'Postal Office Box' X-NDS_UPPER_BOUND '40' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.80 NAME 'printJobConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Print Job Configuration' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.79 NAME 'printerControl' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Printer Control' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.82 NAME 'privateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Private Key' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.83 NAME 'Profile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.84 NAME 'publicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Public Key' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.85 NAME 'queue' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Queue' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.86 NAME 'queueDirectory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{255} SINGLE-VALUE X-NDS_NAME 'Queue Directory' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '255' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.115 NAME 'Reference' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.88 NAME 'Replica' SYNTAX 2.16.840.1.113719.1.1.5.1.16{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.89 NAME 'Resource' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.33 NAME 'roleOccupant' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Role Occupant' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.116 NAME 'higherPrivileges' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Higher Privileges' X-NDS_SERVER_READ '1' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.92 NAME 'securityEquals' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Security Equals' X-NDS_SERVER_READ '1' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
+ "( 2.5.4.34 NAME 'seeAlso' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'See Also' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.5 NAME 'serialNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64} X-NDS_NAME 'Serial Number' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.95 NAME 'server' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Server' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.8 NAME ( 'st' 'stateOrProvinceName' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'S' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.98 NAME 'status' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Status' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' )",
+ "( 2.5.4.9 NAME 'street' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'SA' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.102 NAME 'supportedTypefaces' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Supported Typefaces' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.101 NAME 'supportedServices' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Supported Services' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.4 NAME ( 'sn' 'surname' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Surname' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.20 NAME 'telephoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} X-NDS_NAME 'Telephone Number' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.12 NAME 'title' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'Title' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.111 NAME 'User' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.112 NAME 'Version' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} SINGLE-VALUE X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.1 NAME 'accountBalance' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_NAME 'Account Balance' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.4 NAME 'allowUnlimitedCredit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Allow Unlimited Credit' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.118 NAME 'lowConvergenceResetTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Low Convergence Reset Time' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.54 NAME 'minimumAccountBalance' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Minimum Account Balance' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.104 NAME 'lowConvergenceSyncInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Low Convergence Sync Interval' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.21 NAME 'Device' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.53 NAME 'messageServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Message Server' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.34 NAME 'Language' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.100 NAME 'supportedConnections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Supported Connections' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.107 NAME 'typeCreatorMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Type Creator Map' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.108 NAME 'ndsUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'UID' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.24 NAME 'groupID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'GID' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.110 NAME 'unknownBaseClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Unknown Base Class' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.87 NAME 'receivedUpTo' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Received Up To' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.33 NAME 'synchronizedUpTo' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Synchronized Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.5 NAME 'authorityRevocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Authority Revocation' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.13 NAME 'certificateRevocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Certificate Revocation' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.17 NAME 'ndsCrossCertificatePair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Cross Certificate Pair' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.37 NAME 'lockedByIntruder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Locked By Intruder' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.77 NAME 'printer' SYNTAX 2.16.840.1.113719.1.1.5.1.25 X-NDS_NAME 'Printer' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.20 NAME 'detectIntruder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Detect Intruder' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.38 NAME 'lockoutAfterDetection' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Lockout After Detection' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.32 NAME 'intruderLockoutResetInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_NAME 'Intruder Lockout Reset Interval' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.96 NAME 'serverHolds' SYNTAX 2.16.840.1.113719.1.1.5.1.26 X-NDS_NAME 'Server Holds' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.91 NAME 'sAPName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{47} SINGLE-VALUE X-NDS_NAME 'SAP Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '47' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.113 NAME 'Volume' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.35 NAME 'lastLoginTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Last Login Time' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.81 NAME 'printServer' SYNTAX 2.16.840.1.113719.1.1.5.1.25 SINGLE-VALUE X-NDS_NAME 'Print Server' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.119 NAME 'nNSDomain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_NAME 'NNS Domain' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.120 NAME 'fullName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{127} X-NDS_NAME 'Full Name' X-NDS_UPPER_BOUND '127' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.121 NAME 'partitionControl' SYNTAX 2.16.840.1.113719.1.1.5.1.25 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Control' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.122 NAME 'revision' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Revision' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.123 NAME 'certificateValidityInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'Certificate Validity Interval' X-NDS_LOWER_BOUND '60' X-NDS_UPPER_BOUND '-1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.124 NAME 'externalSynchronizer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'External Synchronizer' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.125 NAME 'messagingDatabaseLocation' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Messaging Database Location' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.126 NAME 'messageRoutingGroup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Message Routing Group' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.127 NAME 'messagingServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Messaging Server' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.128 NAME 'Postmaster' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.162 NAME 'mailboxLocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Mailbox Location' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.163 NAME 'mailboxID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} SINGLE-VALUE X-NDS_NAME 'Mailbox ID' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.164 NAME 'externalName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'External Name' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.165 NAME 'securityFlags' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Security Flags' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.166 NAME 'messagingServerType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} SINGLE-VALUE X-NDS_NAME 'Messaging Server Type' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.167 NAME 'lastReferencedTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Last Referenced Time' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.5.4.42 NAME 'givenName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} X-NDS_NAME 'Given Name' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.43 NAME 'initials' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} X-NDS_NAME 'Initials' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.4.44 NAME 'generationQualifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{8} SINGLE-VALUE X-NDS_NAME 'Generational Qualifier' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '8' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.171 NAME 'profileMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Profile Membership' X-NDS_NAME_VALUE_ACCESS '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.172 NAME 'dsRevision' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'DS Revision' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_OPERATIONAL '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.173 NAME 'supportedGateway' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{4096} X-NDS_NAME 'Supported Gateway' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '4096' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.174 NAME 'equivalentToMe' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Equivalent To Me' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.175 NAME 'replicaUpTo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Replica Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.176 NAME 'partitionStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Partition Status' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.177 NAME 'permanentConfigParms' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Permanent Config Parms' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.178 NAME 'Timezone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.179 NAME 'binderyRestrictionLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Bindery Restriction Level' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.180 NAME 'transitiveVector' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Transitive Vector' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.181 NAME 'T' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.183 NAME 'purgeVector' SYNTAX 2.16.840.1.113719.1.1.5.1.19 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Purge Vector' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_SCHED_SYNC_NEVER '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.184 NAME 'synchronizationTolerance' SYNTAX 2.16.840.1.113719.1.1.5.1.19 USAGE directoryOperation X-NDS_NAME 'Synchronization Tolerance' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.185 NAME 'passwordManagement' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'Password Management' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.186 NAME 'usedBy' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Used By' X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.187 NAME 'Uses' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_SERVER_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.500 NAME 'obituaryNotify' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Obituary Notify' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.501 NAME 'GUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{16} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_LOWER_BOUND '16' X-NDS_UPPER_BOUND '16' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.502 NAME 'otherGUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{16} USAGE directoryOperation X-NDS_NAME 'Other GUID' X-NDS_LOWER_BOUND '16' X-NDS_UPPER_BOUND '16' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.503 NAME 'auxiliaryClassFlag' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Auxiliary Class Flag' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.504 NAME 'unknownAuxiliaryClass' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32} USAGE directoryOperation X-NDS_NAME 'Unknown Auxiliary Class' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 0.9.2342.19200300.100.1.1 NAME ( 'uid' 'userId' ) SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64} X-NDS_NAME 'uniqueID' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 0.9.2342.19200300.100.1.25 NAME 'dc' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64} X-NDS_NAME 'dc' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '64' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.507 NAME 'auxClassObjectClassBackup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'AuxClass Object Class Backup' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.508 NAME 'localReceivedUpTo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NAME 'Local Received Up To' X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.141.4.4 NAME 'federationControl' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.141.4.2 NAME 'federationSearchPath' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.141.4.3 NAME 'federationDNSName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.141.4.1 NAME 'federationBoundaryType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.14.4.1.4 NAME 'DirXML-Associations' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' )",
+ "( 2.5.18.3 NAME 'creatorsName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.5.18.4 NAME 'modifiersName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_FILTERED_REQUIRED '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.300 NAME 'languageId' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.35 NAME 'ndsPredicate' SYNTAX 2.16.840.1.113719.1.1.5.1.12 X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.36 NAME 'ndsPredicateState' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.37 NAME 'ndsPredicateFlush' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.38 NAME 'ndsPredicateTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.40 NAME 'ndsPredicateStatsDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.4.39 NAME 'ndsPredicateUseValues' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.601 NAME 'syncPanePoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.600 NAME 'syncWindowVector' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.602 NAME 'objectVersion' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.27.4.52 NAME 'memberQueryURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'memberQuery' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.302 NAME 'excludedMember' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.525 NAME 'auxClassCompatibility' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38 NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.518 NAME 'ndsAgentPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.519 NAME 'ndsOperationCheckpoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.520 NAME 'localReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.521 NAME 'treeReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.522 NAME 'schemaResetLock' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.523 NAME 'modifiedACLEntry' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.524 NAME 'monitoredConnection' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.526 NAME 'localFederationBoundary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.527 NAME 'replicationFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.721 NAME 'ServerEBAEnabled' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.716 NAME 'EBATreeConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.722 NAME 'EBAPartitionConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.723 NAME 'EBAServerConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.296 NAME 'loginActivationTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.687 NAME 'UpdateInProgress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.720 NAME 'dsContainerReadyAttrs' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.4.400.1 NAME 'edirSchemaFlagVersion' SYNTAX 2.16.840.1.113719.1.1.5.1.0 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NONREMOVABLE '1' X-NDS_HIDDEN '1' X-NDS_READ_FILTERED '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.512 NAME 'indexDefinition' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.513 NAME 'ndsStatusRepair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.514 NAME 'ndsStatusExternalReference' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.515 NAME 'ndsStatusObituary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.516 NAME 'ndsStatusSchema' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.517 NAME 'ndsStatusLimber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.511 NAME 'authoritative' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113730.3.1.34 NAME 'ref' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.546 NAME 'CachedAttrsOnExtRefs' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.547 NAME 'ExtRefLastUpdatedTime' SYNTAX 2.16.840.1.113719.1.1.5.1.19 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation X-NDS_PUBLIC_READ '1' X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.688 NAME 'NCPKeyMaterialName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.1.4.713 NAME 'UTF8LoginScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.714 NAME 'loginScriptCharset' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.721 NAME 'NDSRightsToMonitor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NEVER_SYNC '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.1.192 NAME 'lDAPLogLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_NAME 'LDAP Log Level' X-NDS_UPPER_BOUND '32768' )",
+ "( 2.16.840.1.113719.1.27.4.12 NAME 'lDAPUDPPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP UDP Port' X-NDS_UPPER_BOUND '65535' )",
+ "( 2.16.840.1.113719.1.1.4.1.204 NAME 'lDAPLogFilename' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Log Filename' )",
+ "( 2.16.840.1.113719.1.1.4.1.205 NAME 'lDAPBackupLogFilename' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Backup Log Filename' )",
+ "( 2.16.840.1.113719.1.1.4.1.206 NAME 'lDAPLogSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Log Size Limit' X-NDS_LOWER_BOUND '2048' X-NDS_UPPER_BOUND '-1' )",
+ "( 2.16.840.1.113719.1.1.4.1.194 NAME 'lDAPSearchSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_NAME 'LDAP Search Size Limit' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '2147483647' )",
+ "( 2.16.840.1.113719.1.1.4.1.195 NAME 'lDAPSearchTimeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_NAME 'LDAP Search Time Limit' X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '2147483647' )",
+ "( 2.16.840.1.113719.1.1.4.1.207 NAME 'lDAPSuffix' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Suffix' )",
+ "( 2.16.840.1.113719.1.27.4.70 NAME 'ldapConfigVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.14 NAME 'ldapReferral' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Referral' )",
+ "( 2.16.840.1.113719.1.27.4.73 NAME 'ldapDefaultReferralBehavior' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.23 NAME 'ldapSearchReferralUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'LDAP:searchReferralUsage' )",
+ "( 2.16.840.1.113719.1.27.4.24 NAME 'lDAPOtherReferralUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'LDAP:otherReferralUsage' )",
+ "( 2.16.840.1.113719.1.27.4.1 NAME 'ldapHostServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Host Server' )",
+ "( 2.16.840.1.113719.1.27.4.2 NAME 'ldapGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Group' )",
+ "( 2.16.840.1.113719.1.27.4.3 NAME 'ldapTraceLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_NAME 'LDAP Screen Level' X-NDS_UPPER_BOUND '32768' )",
+ "( 2.16.840.1.113719.1.27.4.4 NAME 'searchSizeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' )",
+ "( 2.16.840.1.113719.1.27.4.5 NAME 'searchTimeLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{2147483647} SINGLE-VALUE X-NDS_UPPER_BOUND '2147483647' )",
+ "( 2.16.840.1.113719.1.27.4.6 NAME 'ldapServerBindLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Server Bind Limit' X-NDS_UPPER_BOUND '-1' )",
+ "( 2.16.840.1.113719.1.27.4.7 NAME 'ldapServerIdleTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{4294967295} SINGLE-VALUE X-NDS_NAME 'LDAP Server Idle Timeout' X-NDS_UPPER_BOUND '-1' )",
+ "( 2.16.840.1.113719.1.27.4.8 NAME 'ldapEnableTCP' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Enable TCP' )",
+ "( 2.16.840.1.113719.1.27.4.10 NAME 'ldapEnableSSL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Enable SSL' )",
+ "( 2.16.840.1.113719.1.27.4.11 NAME 'ldapTCPPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP TCP Port' X-NDS_UPPER_BOUND '65535' )",
+ "( 2.16.840.1.113719.1.27.4.13 NAME 'ldapSSLPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{65535} SINGLE-VALUE X-NDS_NAME 'LDAP SSL Port' X-NDS_UPPER_BOUND '65535' )",
+ "( 2.16.840.1.113719.1.27.4.21 NAME 'filteredReplicaUsage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.22 NAME 'ldapKeyMaterialName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'LDAP:keyMaterialName' )",
+ "( 2.16.840.1.113719.1.27.4.42 NAME 'extensionInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.27.4.45 NAME 'nonStdClientSchemaCompatMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.46 NAME 'sslEnableMutualAuthentication' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.62 NAME 'ldapEnablePSearch' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.63 NAME 'ldapMaximumPSearchOperations' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.64 NAME 'ldapIgnorePSearchLimitsForEvents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.65 NAME 'ldapTLSTrustedRootContainer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.27.4.66 NAME 'ldapEnableMonitorEvents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.67 NAME 'ldapMaximumMonitorEventsLoad' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.68 NAME 'ldapTLSRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.69 NAME 'ldapTLSVerifyClientCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.71 NAME 'ldapDerefAlias' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.72 NAME 'ldapNonStdAllUserAttrsMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.75 NAME 'ldapBindRestrictions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.79 NAME 'ldapInterfaces' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.27.4.80 NAME 'ldapChainSecureRequired' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.82 NAME 'ldapStdCompliance' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.83 NAME 'ldapDerefAliasOnAuth' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.84 NAME 'ldapGeneralizedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.85 NAME 'ldapPermissiveModify' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.86 NAME 'ldapSSLConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.27.4.15 NAME 'ldapServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Server List' )",
+ "( 2.16.840.1.113719.1.27.4.16 NAME 'ldapAttributeMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Attribute Map v11' )",
+ "( 2.16.840.1.113719.1.27.4.17 NAME 'ldapClassMap' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'LDAP Class Map v11' )",
+ "( 2.16.840.1.113719.1.27.4.18 NAME 'ldapAllowClearTextPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'LDAP Allow Clear Text Password' )",
+ "( 2.16.840.1.113719.1.27.4.19 NAME 'ldapAnonymousIdentity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'LDAP Anonymous Identity' )",
+ "( 2.16.840.1.113719.1.27.4.52 NAME 'ldapAttributeList' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} )",
+ "( 2.16.840.1.113719.1.27.4.53 NAME 'ldapClassList' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} )",
+ "( 2.16.840.1.113719.1.27.4.56 NAME 'transitionGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.74 NAME 'ldapTransitionBackLink' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.78 NAME 'ldapLBURPNumWriterThreads' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.27.4.20 NAME 'ldapServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'LDAP Server' )",
+ "( 0.9.2342.19200300.100.1.3 NAME 'mail' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'Internet EMail Address' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113730.3.1.3 NAME 'employeeNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'NSCP:employeeNumber' )",
+ "( 2.16.840.1.113719.1.27.4.76 NAME 'referralExcludeFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.27.4.77 NAME 'referralIncludeFilter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.5.4.36 NAME 'userCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'userCertificate' X-NDS_PUBLIC_READ '1' )",
+ "( 2.5.4.37 NAME 'cACertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'cACertificate' X-NDS_PUBLIC_READ '1' )",
+ "( 2.5.4.40 NAME 'crossCertificatePair' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'crossCertificatePair' X-NDS_PUBLIC_READ '1' )",
+ "( 2.5.4.58 NAME 'attributeCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.5.4.2 NAME 'knowledgeInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
+ "( 2.5.4.14 NAME 'searchGuide' SYNTAX 1.3.6.1.4.1.1466.115.121.1.25{64512} X-NDS_NAME 'searchGuide' )",
+ "( 2.5.4.15 NAME 'businessCategory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{128} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' )",
+ "( 2.5.4.21 NAME 'telexNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.52{64512} X-NDS_NAME 'telexNumber' )",
+ "( 2.5.4.22 NAME 'teletexTerminalIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.51{64512} X-NDS_NAME 'teletexTerminalIdentifier' )",
+ "( 2.5.4.24 NAME 'x121Address' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{15} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '15' )",
+ "( 2.5.4.25 NAME 'internationaliSDNNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.36{16} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '16' )",
+ "( 2.5.4.26 NAME 'registeredAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} X-NDS_NAME 'registeredAddress' )",
+ "( 2.5.4.27 NAME 'destinationIndicator' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{128} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '128' )",
+ "( 2.5.4.28 NAME 'preferredDeliveryMethod' SYNTAX 1.3.6.1.4.1.1466.115.121.1.14{64512} SINGLE-VALUE X-NDS_NAME 'preferredDeliveryMethod' )",
+ "( 2.5.4.29 NAME 'presentationAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.43{64512} SINGLE-VALUE X-NDS_NAME 'presentationAddress' )",
+ "( 2.5.4.30 NAME 'supportedApplicationContext' SYNTAX 1.3.6.1.4.1.1466.115.121.1.38{64512} X-NDS_NAME 'supportedApplicationContext' )",
+ "( 2.5.4.45 NAME 'x500UniqueIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.6{64512} X-NDS_NAME 'x500UniqueIdentifier' )",
+ "( 2.5.4.46 NAME 'dnQualifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.44{64512} )",
+ "( 2.5.4.47 NAME 'enhancedSearchGuide' SYNTAX 1.3.6.1.4.1.1466.115.121.1.21{64512} X-NDS_NAME 'enhancedSearchGuide' )",
+ "( 2.5.4.48 NAME 'protocolInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.42{64512} X-NDS_NAME 'protocolInformation' )",
+ "( 2.5.4.51 NAME 'houseIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
+ "( 2.5.4.52 NAME 'supportedAlgorithms' SYNTAX 1.3.6.1.4.1.1466.115.121.1.49{64512} X-NDS_NAME 'supportedAlgorithms' )",
+ "( 2.5.4.54 NAME 'dmdName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{32768} X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '32768' )",
+ "( 0.9.2342.19200300.100.1.6 NAME 'roomNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.38 NAME 'associatedName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.5.4.49 NAME 'dn' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.1 NAME 'httpServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.3.4.2 NAME 'httpHostServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.3 NAME 'httpThreadsPerCPU' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.4 NAME 'httpIOBufferSize' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.5 NAME 'httpRequestTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.6 NAME 'httpKeepAliveRequestTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.7 NAME 'httpSessionTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.8 NAME 'httpKeyMaterialObject' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.9 NAME 'httpTraceLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.10 NAME 'httpAuthRequiresTLS' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.11 NAME 'httpDefaultClearPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.12 NAME 'httpDefaultTLSPort' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.3.4.13 NAME 'httpBindRestrictions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.295 NAME 'emboxConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.54.4.1.1 NAME 'trusteesOfNewObject' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'Trustees Of New Object' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.55.4.1.1 NAME 'newObjectSDSRights' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'New Object's DS Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.56.4.1.1 NAME 'newObjectSFSRights' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'New Object's FS Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.57.4.1.1 NAME 'setupScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'Setup Script' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.58.4.1.1 NAME 'runSetupScript' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Run Setup Script' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.59.4.1.1 NAME 'membersOfTemplate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Members Of Template' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.60.4.1.1 NAME 'volumeSpaceRestrictions' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'Volume Space Restrictions' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.61.4.1.1 NAME 'setPasswordAfterCreate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'Set Password After Create' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.62.4.1.1 NAME 'homeDirectoryRights' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_NAME 'Home Directory Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.63.4.1.1 NAME 'newObjectSSelfRights' SYNTAX 2.16.840.1.113719.1.1.5.1.17 X-NDS_NAME 'New Object's Self Rights' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.8.4.1 NAME 'digitalMeID' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.8.4.2 NAME 'assistant' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.8.4.3 NAME 'assistantPhone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.4 NAME 'city' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.5 NAME 'company' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.43 NAME 'co' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.6 NAME 'directReports' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 0.9.2342.19200300.100.1.10 NAME 'manager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.8.4.7 NAME 'mailstop' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.41 NAME 'mobile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 0.9.2342.19200300.100.1.40 NAME 'personalTitle' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.42 NAME 'pager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.8 NAME 'workforceID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.9 NAME 'instantMessagingID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.10 NAME 'preferredName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.7 NAME 'photo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113719.1.8.4.11 NAME 'jobCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.12 NAME 'siteLocation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.13 NAME 'employeeStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113730.3.1.4 NAME 'employeeType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.14 NAME 'costCenter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.15 NAME 'costCenterDescription' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.16 NAME 'tollFreePhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.17 NAME 'otherPhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.18 NAME 'managerWorkforceID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.19 NAME 'jackNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113730.3.1.2 NAME 'departmentNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.20 NAME 'vehicleInformation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.21 NAME 'accessCardNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.32 NAME 'isManager' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.8.4.22 NAME 'homeCity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.23 NAME 'homeEmailAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 1.3.6.1.4.1.1466.101.120.31 NAME 'homeFax' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 0.9.2342.19200300.100.1.20 NAME 'homePhone' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.24 NAME 'homeState' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.39 NAME 'homePostalAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.41{64512} )",
+ "( 2.16.840.1.113719.1.8.4.25 NAME 'homeZipCode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.26 NAME 'personalMobile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.8.4.27 NAME 'children' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.28 NAME 'spouse' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.29 NAME 'vendorName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.30 NAME 'vendorAddress' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.8.4.31 NAME 'vendorPhoneNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.50{64512} )",
+ "( 2.16.840.1.113719.1.1.4.1.303 NAME 'dgIdentity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME_VALUE_ACCESS '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.304 NAME 'dgTimeOut' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.305 NAME 'dgAllowUnknown' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.306 NAME 'dgAllowDuplicates' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.546 NAME 'allowAliasToAncestor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.39.4.1.1 NAME 'sASSecurityDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Security DN' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.4.1.2 NAME 'sASServiceDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Service DN' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.4.1.3 NAME 'sASSecretStore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'SAS:SecretStore' )",
+ "( 2.16.840.1.113719.1.39.4.1.4 NAME 'sASSecretStoreKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_NAME 'SAS:SecretStore:Key' X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.4.1.5 NAME 'sASSecretStoreData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'SAS:SecretStore:Data' X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.4.1.6 NAME 'sASPKIStoreKeys' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'SAS:PKIStore:Keys' X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.1 NAME 'nDSPKIPublicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.2 NAME 'nDSPKIPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Private Key' )",
+ "( 2.16.840.1.113719.1.48.4.1.3 NAME 'nDSPKIPublicKeyCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key Certificate' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.4 NAME 'nDSPKICertificateChain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:Certificate Chain' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.16 NAME 'nDSPKIPublicKeyEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key EC' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.17 NAME 'nDSPKIPrivateKeyEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Private Key EC' )",
+ "( 2.16.840.1.113719.1.48.4.1.18 NAME 'nDSPKIPublicKeyCertificateEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Public Key Certificate EC' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.19 NAME 'crossCertificatePairEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'Cross Certificate Pair EC' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.20 NAME 'nDSPKICertificateChainEC' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:Certificate Chain EC' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.5 NAME 'nDSPKIParentCA' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Parent CA' )",
+ "( 2.16.840.1.113719.1.48.4.1.6 NAME 'nDSPKIParentCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'NDSPKI:Parent CA DN' )",
+ "( 2.16.840.1.113719.1.48.4.1.20 NAME 'nDSPKISuiteBMode' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'NDSPKI:SuiteBMode' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.7 NAME 'nDSPKIKeyFile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Key File' )",
+ "( 2.16.840.1.113719.1.48.4.1.8 NAME 'nDSPKISubjectName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Subject Name' )",
+ "( 2.16.840.1.113719.1.48.4.1.11 NAME 'nDSPKIGivenName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Given Name' )",
+ "( 2.16.840.1.113719.1.48.4.1.9 NAME 'nDSPKIKeyMaterialDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:Key Material DN' )",
+ "( 2.16.840.1.113719.1.48.4.1.10 NAME 'nDSPKITreeCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:Tree CA DN' )",
+ "( 2.5.4.59 NAME 'cAECCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.12 NAME 'nDSPKIUserCertificateInfo' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'NDSPKI:userCertificateInfo' )",
+ "( 2.16.840.1.113719.1.48.4.1.13 NAME 'nDSPKITrustedRootCertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Trusted Root Certificate' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.14 NAME 'nDSPKINotBefore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Not Before' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.15 NAME 'nDSPKINotAfter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:Not After' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.101 NAME 'nDSPKISDKeyServerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'NDSPKI:SD Key Server DN' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.102 NAME 'nDSPKISDKeyStruct' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'NDSPKI:SD Key Struct' )",
+ "( 2.16.840.1.113719.1.48.4.1.103 NAME 'nDSPKISDKeyCert' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:SD Key Cert' )",
+ "( 2.16.840.1.113719.1.48.4.1.104 NAME 'nDSPKISDKeyID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'NDSPKI:SD Key ID' )",
+ "( 2.16.840.1.113719.1.39.4.1.105 NAME 'nDSPKIKeystore' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_NAME 'NDSPKI:Keystore' X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.4.1.106 NAME 'ndspkiAdditionalRoots' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.2.3 NAME 'masvLabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.2.4 NAME 'masvProposedLabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.2.5 NAME 'masvDefaultRange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.2.6 NAME 'masvAuthorizedRange' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.2.7 NAME 'masvDomainPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.8 NAME 'masvClearanceNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.9 NAME 'masvLabelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.10 NAME 'masvLabelSecrecyLevelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.11 NAME 'masvLabelSecrecyCategoryNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.12 NAME 'masvLabelIntegrityLevelNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.13 NAME 'masvLabelIntegrityCategoryNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.14 NAME 'masvPolicyUpdate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.16 NAME 'masvNDSAttributeLabels' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.31.4.1.15 NAME 'masvPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.2 NAME 'sASLoginSequence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_NAME 'SAS:Login Sequence' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.8 NAME 'sASLoginPolicyUpdate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Login Policy Update' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.38 NAME 'sasNMASProductOptions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.74 NAME 'sasAuditConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.14 NAME 'sASNDSPasswordWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:NDS Password Window' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.15 NAME 'sASPolicyCredentials' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Policy Credentials' X-NDS_SERVER_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.16 NAME 'sASPolicyMethods' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Methods' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.17 NAME 'sASPolicyObjectVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Policy Object Version' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.18 NAME 'sASPolicyServiceSubtypes' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Service Subtypes' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.19 NAME 'sASPolicyServices' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Services' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.20 NAME 'sASPolicyUsers' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'SAS:Policy Users' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.21 NAME 'sASAllowNDSPasswordWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'SAS:Allow NDS Password Window' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.9 NAME 'sASMethodIdentifier' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Method Identifier' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.10 NAME 'sASMethodVendor' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Method Vendor' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.11 NAME 'sASAdvisoryMethodGrade' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Advisory Method Grade' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.12 NAME 'sASVendorSupport' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'SAS:Vendor Support' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.13 NAME 'sasCertificateSearchContainers' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.70 NAME 'sasNMASMethodConfigData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.22 NAME 'sASLoginClientMethodNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Client Method NetWare' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.23 NAME 'sASLoginServerMethodNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Server Method NetWare' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.24 NAME 'sASLoginClientMethodWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Client Method WINNT' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.25 NAME 'sASLoginServerMethodWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NAME 'SAS:Login Server Method WINNT' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.26 NAME 'sasLoginClientMethodSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.27 NAME 'sasLoginServerMethodSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.28 NAME 'sasLoginClientMethodLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.29 NAME 'sasLoginServerMethodLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.30 NAME 'sasLoginClientMethodTru64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.31 NAME 'sasLoginServerMethodTru64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.32 NAME 'sasLoginClientMethodAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.33 NAME 'sasLoginServerMethodAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.34 NAME 'sasLoginClientMethodHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.35 NAME 'sasLoginServerMethodHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1000 NAME 'sasLoginClientMethods390' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1001 NAME 'sasLoginServerMethods390' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1002 NAME 'sasLoginClientMethodLinuxX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1003 NAME 'sasLoginServerMethodLinuxX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1004 NAME 'sasLoginClientMethodWinX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1005 NAME 'sasLoginServerMethodWinX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1006 NAME 'sasLoginClientMethodSolaris64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1007 NAME 'sasLoginServerMethodSolaris64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1008 NAME 'sasLoginClientMethodAIX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1009 NAME 'sasLoginServerMethodAIX64' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1011 NAME 'sasLoginServerMethodSolarisi386' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1012 NAME 'sasLoginClientMethodSolarisi386' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.78 NAME 'sasUnsignedMethodModules' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.79 NAME 'sasServerModuleName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.80 NAME 'sasServerModuleEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.81 NAME 'sasSASLMechanismName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.82 NAME 'sasSASLMechanismEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.83 NAME 'sasClientModuleName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.84 NAME 'sasClientModuleEntryPointName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.36 NAME 'sASLoginMethodContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Login Method Container DN' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.37 NAME 'sASLoginPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'SAS:Login Policy DN' X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.63 NAME 'sasPostLoginMethodContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.38 NAME 'rADIUSActiveConnections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Active Connections' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.39 NAME 'rADIUSAgedInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Aged Interval' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.40 NAME 'rADIUSAttributeList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Attribute List' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.41 NAME 'rADIUSAttributeLists' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Attribute Lists' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.42 NAME 'rADIUSClient' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Client' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.43 NAME 'rADIUSCommonNameResolution' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Common Name Resolution' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.44 NAME 'rADIUSConcurrentLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Concurrent Limit' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.45 NAME 'rADIUSConnectionHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Connection History' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.46 NAME 'rADIUSDASVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:DAS Version' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.47 NAME 'rADIUSDefaultProfile' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Default Profile' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.48 NAME 'rADIUSDialAccessGroup' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'RADIUS:Dial Access Group' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.49 NAME 'rADIUSEnableCommonNameLogin' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'RADIUS:Enable Common Name Login' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.50 NAME 'rADIUSEnableDialAccess' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NAME 'RADIUS:Enable Dial Access' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.51 NAME 'rADIUSInterimAcctingTimeout' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Interim Accting Timeout' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.52 NAME 'rADIUSLookupContexts' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'RADIUS:Lookup Contexts' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.53 NAME 'rADIUSMaxDASHistoryRecord' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Max DAS History Record' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.54 NAME 'rADIUSMaximumHistoryRecord' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Maximum History Record' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.55 NAME 'rADIUSPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Password' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.56 NAME 'rADIUSPasswordPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'RADIUS:Password Policy' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.57 NAME 'rADIUSPrivateKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Private Key' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.58 NAME 'rADIUSProxyContext' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'RADIUS:Proxy Context' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.59 NAME 'rADIUSProxyDomain' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Proxy Domain' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.60 NAME 'rADIUSProxyTarget' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'RADIUS:Proxy Target' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.61 NAME 'rADIUSPublicKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'RADIUS:Public Key' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.62 NAME 'rADIUSServiceList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_NAME 'RADIUS:Service List' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.3 NAME 'sASLoginSecret' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Secret' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.4 NAME 'sASLoginSecretKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Secret Key' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.5 NAME 'sASEncryptionType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'SAS:Encryption Type' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.6 NAME 'sASLoginConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Configuration' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.7 NAME 'sASLoginConfigurationKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'SAS:Login Configuration Key' X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.73 NAME 'sasDefaultLoginSequence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.64 NAME 'sasAuthorizedLoginSequences' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.69 NAME 'sasAllowableSubjectNames' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.71 NAME 'sasLoginFailureDelay' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.72 NAME 'sasMethodVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1010 NAME 'sasUpdateLoginInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1011 NAME 'sasOTPEnabled' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1012 NAME 'sasOTPCounter' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1013 NAME 'sasOTPLookAheadWindow' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1014 NAME 'sasOTPDigits' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1015 NAME 'sasOTPReSync' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.39.42.1.0.1016 NAME 'sasUpdateLoginTimeInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.6.4.1 NAME 'snmpGroupDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.6.4.2 NAME 'snmpServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.6.4.3 NAME 'snmpTrapConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.6.4.4 NAME 'snmpTrapDescription' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.6.4.5 NAME 'snmpTrapInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.6.4.6 NAME 'snmpTrapDisable' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.528 NAME 'ndapPartitionPasswordMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.529 NAME 'ndapClassPasswordMgmt' SYNTAX 2.16.840.1.113719.1.1.5.1.0 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.530 NAME 'ndapPasswordMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.537 NAME 'ndapPartitionLoginMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.538 NAME 'ndapClassLoginMgmt' SYNTAX 2.16.840.1.113719.1.1.5.1.0 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.539 NAME 'ndapLoginMgmt' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.1 NAME 'nspmPasswordKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.2 NAME 'nspmPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.3 NAME 'nspmDistributionPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.4 NAME 'nspmPasswordHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.5 NAME 'nspmAdministratorChangeCount' SYNTAX 2.16.840.1.113719.1.1.5.1.22 SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.6 NAME 'nspmPasswordPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.7 NAME 'nspmPreviousDistributionPassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.8 NAME 'nspmDoNotExpirePassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 1.3.6.1.4.1.42.2.27.8.1.16 NAME 'pwdChangedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 1.3.6.1.4.1.42.2.27.8.1.17 NAME 'pwdAccountLockedTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 1.3.6.1.4.1.42.2.27.8.1.19 NAME 'pwdFailureTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 NO-USER-MODIFICATION USAGE directoryOperation )",
+ "( 2.16.840.1.113719.1.39.43.4.100 NAME 'nspmConfigurationOptions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.102 NAME 'nspmChangePasswordMessage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.103 NAME 'nspmPasswordHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.104 NAME 'nspmPasswordHistoryExpiration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 1.3.6.1.4.1.42.2.27.8.1.4 NAME 'pwdInHistory' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.105 NAME 'nspmMinPasswordLifetime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.106 NAME 'nspmAdminsDoNotExpirePassword' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.107 NAME 'nspmPasswordACL' SYNTAX 2.16.840.1.113719.1.1.5.1.17 )",
+ "( 2.16.840.1.113719.1.39.43.4.200 NAME 'nspmMaximumLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.201 NAME 'nspmMinUpperCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.202 NAME 'nspmMaxUpperCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.203 NAME 'nspmMinLowerCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.204 NAME 'nspmMaxLowerCaseCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.205 NAME 'nspmNumericCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.206 NAME 'nspmNumericAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.207 NAME 'nspmNumericAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.208 NAME 'nspmMinNumericCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.209 NAME 'nspmMaxNumericCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.210 NAME 'nspmSpecialCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.211 NAME 'nspmSpecialAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.212 NAME 'nspmSpecialAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.213 NAME 'nspmMinSpecialCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.214 NAME 'nspmMaxSpecialCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.215 NAME 'nspmMaxRepeatedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.216 NAME 'nspmMaxConsecutiveCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.217 NAME 'nspmMinUniqueCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.218 NAME 'nspmDisallowedAttributeValues' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.219 NAME 'nspmExcludeList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.220 NAME 'nspmCaseSensitive' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.221 NAME 'nspmPolicyPrecedence' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.222 NAME 'nspmExtendedCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.223 NAME 'nspmExtendedAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.224 NAME 'nspmExtendedAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.225 NAME 'nspmMinExtendedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.226 NAME 'nspmMaxExtendedCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.227 NAME 'nspmUpperAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.228 NAME 'nspmUpperAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.229 NAME 'nspmLowerAsFirstCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.230 NAME 'nspmLowerAsLastCharacter' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.231 NAME 'nspmComplexityRules' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.233 NAME 'nspmAD2K8Syntax' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.234 NAME 'nspmAD2K8maxViolation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.235 NAME 'nspmXCharLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.236 NAME 'nspmXCharHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.237 NAME 'nspmUnicodeAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.238 NAME 'nspmNonAlphaCharactersAllowed' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.239 NAME 'nspmMinNonAlphaCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.240 NAME 'nspmMaxNonAlphaCharacters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.241 NAME 'nspmGraceLoginHistoryLimit' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.300 NAME 'nspmPolicyAgentContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.301 NAME 'nspmPolicyAgentNetWare' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.302 NAME 'nspmPolicyAgentWINNT' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.303 NAME 'nspmPolicyAgentSolaris' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.304 NAME 'nspmPolicyAgentLinux' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.305 NAME 'nspmPolicyAgentAIX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.43.4.306 NAME 'nspmPolicyAgentHPUX' SYNTAX 1.3.6.1.4.1.1466.115.121.1.5 SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 0.9.2342.19200300.100.1.55 NAME 'audio' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113730.3.1.1 NAME 'carLicense' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113730.3.1.241 NAME 'displayName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.60 NAME 'jpegPhoto' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 1.3.6.1.4.1.250.1.57 NAME 'labeledUri' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 0.9.2342.19200300.100.1.7 NAME 'ldapPhoto' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113730.3.1.39 NAME 'preferredLanguage' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )",
+ "( 0.9.2342.19200300.100.1.21 NAME 'secretary' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113730.3.1.40 NAME 'userSMIMECertificate' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113730.3.1.216 NAME 'userPKCS12' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113719.1.12.4.1.0 NAME 'auditAEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:A Encryption Key' )",
+ "( 2.16.840.1.113719.1.12.4.2.0 NAME 'auditBEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:B Encryption Key' )",
+ "( 2.16.840.1.113719.1.12.4.3.0 NAME 'auditContents' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Audit:Contents' )",
+ "( 2.16.840.1.113719.1.12.4.4.0 NAME 'auditType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'Audit:Type' )",
+ "( 2.16.840.1.113719.1.12.4.5.0 NAME 'auditCurrentEncryptionKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Current Encryption Key' )",
+ "( 2.16.840.1.113719.1.12.4.6.0 NAME 'auditFileLink' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'Audit:File Link' )",
+ "( 2.16.840.1.113719.1.12.4.7.0 NAME 'auditLinkList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NAME 'Audit:Link List' )",
+ "( 2.16.840.1.113719.1.12.4.8.0 NAME 'auditPath' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Path' )",
+ "( 2.16.840.1.113719.1.12.4.9.0 NAME 'auditPolicy' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_NAME 'Audit:Policy' )",
+ "( 2.16.840.1.113719.1.38.4.1.1 NAME 'wANMANWANPolicy' SYNTAX 2.16.840.1.113719.1.1.5.1.13{64512} X-NDS_NAME 'WANMAN:WAN Policy' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.38.4.1.2 NAME 'wANMANLANAreaMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NAME 'WANMAN:LAN Area Membership' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.38.4.1.3 NAME 'wANMANCost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_NAME 'WANMAN:Cost' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.38.4.1.4 NAME 'wANMANDefaultCost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NAME 'WANMAN:Default Cost' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.135.4.30 NAME 'rbsAssignedRoles' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
+ "( 2.16.840.1.113719.1.135.4.31 NAME 'rbsContent' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
+ "( 2.16.840.1.113719.1.135.4.32 NAME 'rbsContentMembership' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
+ "( 2.16.840.1.113719.1.135.4.33 NAME 'rbsEntryPoint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.135.4.34 NAME 'rbsMember' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
+ "( 2.16.840.1.113719.1.135.4.35 NAME 'rbsOwnedCollections' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.135.4.36 NAME 'rbsPath' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
+ "( 2.16.840.1.113719.1.135.4.37 NAME 'rbsParameters' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} )",
+ "( 2.16.840.1.113719.1.135.4.38 NAME 'rbsTaskRights' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113719.1.135.4.39 NAME 'rbsTrusteeOf' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
+ "( 2.16.840.1.113719.1.135.4.40 NAME 'rbsType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{256} SINGLE-VALUE X-NDS_LOWER_BOUND '1' X-NDS_UPPER_BOUND '256' )",
+ "( 2.16.840.1.113719.1.135.4.41 NAME 'rbsURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.135.4.42 NAME 'rbsTaskTemplates' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113719.1.135.4.43 NAME 'rbsTaskTemplatesURL' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.135.4.44 NAME 'rbsGALabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.135.4.45 NAME 'rbsPageMembership' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} )",
+ "( 2.16.840.1.113719.1.135.4.46 NAME 'rbsTargetObjectType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.135.4.47 NAME 'rbsContext' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.135.4.48 NAME 'rbsXMLInfo' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.135.4.51 NAME 'rbsAssignedRoles2' SYNTAX 2.16.840.1.113719.1.1.5.1.25 )",
+ "( 2.16.840.1.113719.1.135.4.52 NAME 'rbsOwnedCollections2' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.1.4.1.540 NAME 'prSyncPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.541 NAME 'prSyncAttributes' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_SERVER_READ '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.542 NAME 'dsEncryptedReplicationConfig' SYNTAX 2.16.840.1.113719.1.1.5.1.19 )",
+ "( 2.16.840.1.113719.1.1.4.1.543 NAME 'encryptionPolicyDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.544 NAME 'attrEncryptionRequiresSecure' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.545 NAME 'attrEncryptionDefinition' SYNTAX 2.16.840.1.113719.1.1.5.1.6{64512} X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.16 NAME 'ndspkiCRLFileName' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.17 NAME 'ndspkiStatus' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.18 NAME 'ndspkiIssueTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.19 NAME 'ndspkiNextIssueTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.20 NAME 'ndspkiAttemptTime' SYNTAX 1.3.6.1.4.1.1466.115.121.1.24 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.21 NAME 'ndspkiTimeInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.22 NAME 'ndspkiCRLMaxProcessingInterval' SYNTAX 2.16.840.1.113719.1.1.5.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.23 NAME 'ndspkiCRLNumber' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.24 NAME 'ndspkiDistributionPoints' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.25 NAME 'ndspkiCRLProcessData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.26 NAME 'ndspkiCRLConfigurationDNList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.27 NAME 'ndspkiCADN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.28 NAME 'ndspkiCRLContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.29 NAME 'ndspkiIssuedCertContainerDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.30 NAME 'ndspkiDistributionPointDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.31 NAME 'ndspkiCRLConfigurationDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.32 NAME 'ndspkiDirectory' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} )",
+ "( 2.5.4.38 NAME 'authorityRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiAuthorityRevocationList' X-NDS_PUBLIC_READ '1' )",
+ "( 2.5.4.39 NAME 'certificateRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiCertificateRevocationList' X-NDS_PUBLIC_READ '1' )",
+ "( 2.5.4.53 NAME 'deltaRevocationList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40 SINGLE-VALUE X-NDS_NAME 'ndspkiDeltaRevocationList' X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.36 NAME 'ndspkiTrustedRootList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.37 NAME 'ndspkiSecurityRightsLevel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.48.4.1.38 NAME 'ndspkiKMOExport' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.48.4.1.39 NAME 'ndspkiCRLECConfigurationDNList' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.40 NAME 'ndspkiCRLType' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.41 NAME 'ndspkiCRLExtendValidity' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.42 NAME 'ndspkiDefaultRSAKeySize' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.43 NAME 'ndspkiDefaultECCurve' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.48.4.1.44 NAME 'ndspkiDefaultCertificateLife' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.7.4.1 NAME 'notfSMTPEmailHost' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.7.4.2 NAME 'notfSMTPEmailFrom' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.7.4.3 NAME 'notfSMTPEmailUserName' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.7.4.5 NAME 'notfMergeTemplateData' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.7.4.6 NAME 'notfMergeTemplateSubject' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.1 NAME 'nsimRequiredQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.2 NAME 'nsimRandomQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.3 NAME 'nsimNumberRandomQuestions' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.4 NAME 'nsimMinResponseLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.5 NAME 'nsimMaxResponseLength' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.6 NAME 'nsimForgottenLoginConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.7 NAME 'nsimForgottenAction' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.8 NAME 'nsimAssignments' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.9 NAME 'nsimChallengeSetDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.10 NAME 'nsimChallengeSetGUID' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.11 NAME 'nsimPwdRuleEnforcement' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.12 NAME 'nsimHint' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.39.44.4.13 NAME 'nsimPasswordReminder' SYNTAX 1.3.6.1.4.1.1466.115.121.1.26{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.266.4.4 NAME 'sssProxyStoreKey' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.266.4.5 NAME 'sssProxyStoreSecrets' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} USAGE directoryOperation X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.266.4.6 NAME 'sssActiveServerList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113719.1.266.4.7 NAME 'sssCacheRefreshInterval' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.266.4.8 NAME 'sssAdminList' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.266.4.9 NAME 'sssAdminGALabel' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.266.4.10 NAME 'sssEnableReadTimestamps' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.266.4.11 NAME 'sssDisableMasterPasswords' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.266.4.12 NAME 'sssEnableAdminAccess' SYNTAX 1.3.6.1.4.1.1466.115.121.1.7 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.266.4.13 NAME 'sssReadSecretPolicies' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} )",
+ "( 2.16.840.1.113719.1.266.4.14 NAME 'sssServerPolicyOverrideDN' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.1.531 NAME 'eDirCloneSource' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.532 NAME 'eDirCloneKeys' SYNTAX 1.3.6.1.4.1.1466.115.121.1.40{64512} NO-USER-MODIFICATION USAGE directoryOperation X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' X-NDS_HIDDEN '1' )",
+ "( 2.16.840.1.113719.1.1.4.1.533 NAME 'eDirCloneLock' SYNTAX 2.16.840.1.113719.1.1.5.1.15{64512} SINGLE-VALUE X-NDS_NOT_SCHED_SYNC_IMMEDIATE '1' )",
+ "( 2.16.840.1.113719.1.1.4.711 NAME 'groupMember' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 )",
+ "( 2.16.840.1.113719.1.1.4.712 NAME 'nestedConfig' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27 SINGLE-VALUE )",
+ "( 2.16.840.1.113719.1.1.4.717 NAME 'xdasDSConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.1.4.718 NAME 'xdasConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.1.4.719 NAME 'xdasVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_UPPER_BOUND '32768' )",
+ "( 2.16.840.1.113719.1.347.4.79 NAME 'NAuditInstrumentation' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.347.4.2 NAME 'NAuditLoggingServer' SYNTAX 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_PUBLIC_READ '1' )",
+ "( 2.16.840.1.113719.1.1.4.724 NAME 'cefConfiguration' SYNTAX 1.3.6.1.4.1.1466.115.121.1.15{64512} )",
+ "( 2.16.840.1.113719.1.1.4.725 NAME 'cefVersion' SYNTAX 1.3.6.1.4.1.1466.115.121.1.27{32768} SINGLE-VALUE X-NDS_UPPER_BOUND '32768' )"
+ ],
+ "createTimestamp": [],
+ "dITContentRules": [],
+ "dITStructureRules": [],
+ "ldapSyntaxes": [
+ "( 1.3.6.1.4.1.1466.115.121.1.1 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.2 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.3 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.4 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.5 X-NDS_SYNTAX '21' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.6 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.7 X-NDS_SYNTAX '7' )",
+ "( 2.16.840.1.113719.1.1.5.1.6 X-NDS_SYNTAX '6' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.8 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.9 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.10 X-NDS_SYNTAX '9' )",
+ "( 2.16.840.1.113719.1.1.5.1.22 X-NDS_SYNTAX '22' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.11 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.12 X-NDS_SYNTAX '1' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.13 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.14 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.15 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.16 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.17 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.18 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.19 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.20 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.21 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.22 X-NDS_SYNTAX '11' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.23 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.24 X-NDS_SYNTAX '24' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.25 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.26 X-NDS_SYNTAX '2' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.27 X-NDS_SYNTAX '8' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.28 X-NDS_SYNTAX '9' )",
+ "( 1.2.840.113556.1.4.906 X-NDS_SYNTAX '29' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.54 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.56 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.57 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.29 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.30 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.31 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.32 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.33 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.55 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.34 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.35 X-NDS_SYNTAX '3' )",
+ "( 2.16.840.1.113719.1.1.5.1.19 X-NDS_SYNTAX '19' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.36 X-NDS_SYNTAX '5' )",
+ "( 2.16.840.1.113719.1.1.5.1.17 X-NDS_SYNTAX '17' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.37 X-NDS_SYNTAX '3' )",
+ "( 2.16.840.1.113719.1.1.5.1.13 X-NDS_SYNTAX '13' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.40 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.38 X-NDS_SYNTAX '20' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.39 X-NDS_SYNTAX '3' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.41 X-NDS_SYNTAX '18' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.43 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.44 X-NDS_SYNTAX '4' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.42 X-NDS_SYNTAX '9' )",
+ "( 2.16.840.1.113719.1.1.5.1.16 X-NDS_SYNTAX '16' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.58 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.45 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.46 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.47 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.48 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.49 X-NDS_SYNTAX '9' )",
+ "( 2.16.840.1.113719.1.1.5.1.12 X-NDS_SYNTAX '12' )",
+ "( 2.16.840.1.113719.1.1.5.1.23 X-NDS_SYNTAX '23' )",
+ "( 2.16.840.1.113719.1.1.5.1.15 X-NDS_SYNTAX '15' )",
+ "( 2.16.840.1.113719.1.1.5.1.14 X-NDS_SYNTAX '14' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.50 X-NDS_SYNTAX '10' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.51 X-NDS_SYNTAX '9' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.52 X-NDS_SYNTAX '9' )",
+ "( 2.16.840.1.113719.1.1.5.1.25 X-NDS_SYNTAX '25' )",
+ "( 1.3.6.1.4.1.1466.115.121.1.53 X-NDS_SYNTAX '9' )",
+ "( 2.16.840.1.113719.1.1.5.1.26 X-NDS_SYNTAX '26' )",
+ "( 2.16.840.1.113719.1.1.5.1.27 X-NDS_SYNTAX '27' )"
+ ],
+ "matchingRuleUse": [],
+ "matchingRules": [],
+ "modifyTimestamp": [
+ "20190831135835Z"
+ ],
+ "nameForms": [],
+ "objectClass": [
+ "top",
+ "subschema"
+ ],
+ "objectClasses": [
+ "( 2.5.6.0 NAME 'Top' STRUCTURAL MUST objectClass MAY ( cAPublicKey $ cAPrivateKey $ certificateValidityInterval $ authorityRevocation $ lastReferencedTime $ equivalentToMe $ ACL $ backLink $ binderyProperty $ Obituary $ Reference $ revision $ ndsCrossCertificatePair $ certificateRevocation $ usedBy $ GUID $ otherGUID $ DirXML-Associations $ creatorsName $ modifiersName $ objectVersion $ auxClassCompatibility $ unknownBaseClass $ unknownAuxiliaryClass $ masvProposedLabel $ masvDefaultRange $ masvAuthorizedRange $ auditFileLink $ rbsAssignedRoles $ rbsOwnedCollections $ rbsAssignedRoles2 $ rbsOwnedCollections2 ) X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '16#subtree#[Creator]#[Entry Rights]' )",
+ "( 1.3.6.1.4.1.42.2.27.1.2.1 NAME 'aliasObject' SUP Top STRUCTURAL MUST aliasedObjectName X-NDS_NAME 'Alias' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.6.2 NAME 'Country' SUP Top STRUCTURAL MUST c MAY ( description $ searchGuide $ sssActiveServerList $ sssServerPolicyOverrideDN ) X-NDS_NAMING 'c' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'domain' ) X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.6.3 NAME 'Locality' SUP Top STRUCTURAL MAY ( description $ l $ seeAlso $ st $ street $ searchGuide $ sssActiveServerList $ sssServerPolicyOverrideDN ) X-NDS_NAMING ( 'l' 'st' ) X-NDS_CONTAINMENT ( 'Country' 'organizationalUnit' 'Locality' 'Organization' 'domain' ) X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.6.4 NAME 'Organization' SUP ( ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST o MAY ( description $ facsimileTelephoneNumber $ l $ loginScript $ eMailAddress $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ printJobConfiguration $ printerControl $ seeAlso $ st $ street $ telephoneNumber $ loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ nNSDomain $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber $ businessCategory $ searchGuide $ rADIUSAttributeLists $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSServiceList $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'o' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Locality' 'domain' ) X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Self]#loginScript' '2#entry#[Self]#printJobConfiguration') )",
+ "( 2.5.6.5 NAME 'organizationalUnit' SUP ( ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST ou MAY ( description $ facsimileTelephoneNumber $ l $ loginScript $ eMailAddress $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ printJobConfiguration $ printerControl $ seeAlso $ st $ street $ telephoneNumber $ loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ nNSDomain $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber $ businessCategory $ searchGuide $ rADIUSAttributeLists $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSServiceList $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'ou' X-NDS_CONTAINMENT ( 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Unit' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Self]#loginScript' '2#entry#[Self]#printJobConfiguration') )",
+ "( 2.5.6.8 NAME 'organizationalRole' SUP Top STRUCTURAL MUST cn MAY ( description $ facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ roleOccupant $ seeAlso $ st $ street $ telephoneNumber $ mailboxLocation $ mailboxID $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ internationaliSDNNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Role' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.6.9 NAME ( 'groupOfNames' 'group' 'groupOfUniqueNames' ) SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ ou $ o $ owner $ seeAlso $ groupID $ fullName $ eMailAddress $ mailboxLocation $ mailboxID $ Profile $ profileMembership $ loginScript $ businessCategory $ nspmPasswordPolicyDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Group' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.6.6 NAME 'Person' SUP ndsLoginProperties STRUCTURAL MUST ( cn $ sn ) MAY ( description $ seeAlso $ telephoneNumber $ fullName $ givenName $ initials $ generationQualifier $ uid $ assistant $ assistantPhone $ city $ st $ company $ co $ directReports $ manager $ mailstop $ mobile $ personalTitle $ pager $ workforceID $ instantMessagingID $ preferredName $ photo $ jobCode $ siteLocation $ employeeStatus $ employeeType $ costCenter $ costCenterDescription $ tollFreePhoneNumber $ otherPhoneNumber $ managerWorkforceID $ roomNumber $ jackNumber $ departmentNumber $ vehicleInformation $ accessCardNumber $ isManager $ userPassword ) X-NDS_NAMING ( 'cn' 'uid' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.5.6.7 NAME 'organizationalPerson' SUP Person STRUCTURAL MAY ( facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ st $ street $ title $ mailboxLocation $ mailboxID $ uid $ mail $ employeeNumber $ destinationIndicator $ internationaliSDNNumber $ preferredDeliveryMethod $ registeredAddress $ teletexTerminalIdentifier $ telexNumber $ x121Address $ businessCategory $ roomNumber $ x500UniqueIdentifier ) X-NDS_NAMING ( 'cn' 'ou' 'uid' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Organizational Person' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113730.3.2.2 NAME 'inetOrgPerson' SUP organizationalPerson STRUCTURAL MAY ( groupMembership $ ndsHomeDirectory $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginGraceRemaining $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginScript $ loginTime $ networkAddressRestriction $ networkAddress $ passwordsUsed $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ printJobConfiguration $ privateKey $ Profile $ publicKey $ securityEquals $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ messageServer $ Language $ ndsUID $ lockedByIntruder $ serverHolds $ lastLoginTime $ typeCreatorMap $ higherPrivileges $ printerControl $ securityFlags $ profileMembership $ Timezone $ sASServiceDN $ sASSecretStore $ sASSecretStoreKey $ sASSecretStoreData $ sASPKIStoreKeys $ userCertificate $ nDSPKIUserCertificateInfo $ nDSPKIKeystore $ rADIUSActiveConnections $ rADIUSAttributeLists $ rADIUSConcurrentLimit $ rADIUSConnectionHistory $ rADIUSDefaultProfile $ rADIUSDialAccessGroup $ rADIUSEnableDialAccess $ rADIUSPassword $ rADIUSServiceList $ audio $ businessCategory $ carLicense $ departmentNumber $ employeeNumber $ employeeType $ displayName $ givenName $ homePhone $ homePostalAddress $ initials $ jpegPhoto $ labeledUri $ mail $ manager $ mobile $ o $ pager $ ldapPhoto $ preferredLanguage $ roomNumber $ secretary $ uid $ userSMIMECertificate $ x500UniqueIdentifier $ userPKCS12 $ sssProxyStoreKey $ sssProxyStoreSecrets $ sssServerPolicyOverrideDN ) X-NDS_NAME 'User' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#subtree#[Self]#[All Attributes Rights]' '6#entry#[Self]#loginScript' '1#subtree#[Root Template]#[Entry Rights]' '2#entry#[Public]#messageServer' '2#entry#[Root Template]#groupMembership' '6#entry#[Self]#printJobConfiguration' '2#entry#[Root Template]#networkAddress') )",
+ "( 2.5.6.14 NAME 'Device' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ networkAddress $ ou $ o $ owner $ seeAlso $ serialNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.4 NAME 'Computer' SUP Device STRUCTURAL MAY ( operator $ server $ status ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.17 NAME 'Printer' SUP Device STRUCTURAL MAY ( Cartridge $ printerConfiguration $ defaultQueue $ hostDevice $ printServer $ Memory $ networkAddressRestriction $ notify $ operator $ pageDescriptionLanguage $ queue $ status $ supportedTypefaces ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.21 NAME 'Resource' SUP Top ABSTRACT MUST cn MAY ( description $ hostResourceName $ l $ ou $ o $ seeAlso $ Uses ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.20 NAME 'Queue' SUP Resource STRUCTURAL MUST queueDirectory MAY ( Device $ operator $ server $ User $ networkAddress $ Volume $ hostServer ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )",
+ "( 2.16.840.1.113719.1.1.6.1.3 NAME 'binderyQueue' SUP Queue STRUCTURAL MUST binderyType X-NDS_NAMING ( 'cn' 'binderyType' ) X-NDS_NAME 'Bindery Queue' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )",
+ "( 2.16.840.1.113719.1.1.6.1.26 NAME 'Volume' SUP Resource STRUCTURAL MUST hostServer MAY status X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Root Template]#hostResourceName' '2#entry#[Root Template]#hostServer') )",
+ "( 2.16.840.1.113719.1.1.6.1.7 NAME 'directoryMap' SUP Resource STRUCTURAL MUST hostServer MAY path X-NDS_NAME 'Directory Map' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.19 NAME 'Profile' SUP Top STRUCTURAL MUST ( cn $ loginScript ) MAY ( description $ l $ ou $ o $ seeAlso $ fullName ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.22 NAME 'Server' SUP Top ABSTRACT MUST cn MAY ( description $ hostDevice $ l $ ou $ o $ privateKey $ publicKey $ Resource $ seeAlso $ status $ User $ Version $ networkAddress $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ fullName $ securityEquals $ securityFlags $ Timezone $ ndapClassPasswordMgmt $ ndapClassLoginMgmt ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '2#entry#[Public]#networkAddress' '16#subtree#[Self]#[Entry Rights]') )",
+ "( 2.16.840.1.113719.1.1.6.1.10 NAME 'ncpServer' SUP Server STRUCTURAL MAY ( operator $ supportedServices $ messagingServer $ dsRevision $ permanentConfigParms $ ndsPredicateStatsDN $ languageId $ indexDefinition $ CachedAttrsOnExtRefs $ NCPKeyMaterialName $ NDSRightsToMonitor $ ldapServerDN $ httpServerDN $ emboxConfig $ sASServiceDN $ cACertificate $ cAECCertificate $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKICertificateChain $ nDSPKIParentCADN $ nDSPKISDKeyID $ nDSPKISDKeyStruct $ snmpGroupDN $ wANMANWANPolicy $ wANMANLANAreaMembership $ wANMANCost $ wANMANDefaultCost $ encryptionPolicyDN $ eDirCloneSource $ eDirCloneLock $ xdasDSConfiguration $ xdasConfiguration $ xdasVersion $ NAuditLoggingServer $ NAuditInstrumentation $ cefConfiguration $ cefVersion ) X-NDS_NAME 'NCP Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#entry#[Public]#messagingServer' )",
+ "( 2.16.840.1.113719.1.1.6.1.18 NAME 'printServer' SUP Server STRUCTURAL MAY ( operator $ printer $ sAPName ) X-NDS_NAME 'Print Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#subtree#[Root Template]#[All Attributes Rights]' )",
+ "( 2.16.840.1.113719.1.1.6.1.31 NAME 'CommExec' SUP Server STRUCTURAL MAY networkAddressRestriction X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.2 NAME 'binderyObject' SUP Top STRUCTURAL MUST ( binderyObjectRestriction $ binderyType $ cn ) X-NDS_NAMING ( 'cn' 'binderyType' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'Bindery Object' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.15 NAME 'Partition' AUXILIARY MAY ( Convergence $ partitionCreationTime $ Replica $ inheritedACL $ lowConvergenceSyncInterval $ receivedUpTo $ synchronizedUpTo $ authorityRevocation $ certificateRevocation $ cAPrivateKey $ cAPublicKey $ ndsCrossCertificatePair $ lowConvergenceResetTime $ highConvergenceSyncInterval $ partitionControl $ replicaUpTo $ partitionStatus $ transitiveVector $ purgeVector $ synchronizationTolerance $ obituaryNotify $ localReceivedUpTo $ federationControl $ syncPanePoint $ syncWindowVector $ EBAPartitionConfiguration $ authoritative $ allowAliasToAncestor $ sASSecurityDN $ masvLabel $ ndapPartitionPasswordMgmt $ ndapPartitionLoginMgmt $ prSyncPolicyDN $ dsEncryptedReplicationConfig ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.0 NAME 'aFPServer' SUP Server STRUCTURAL MAY ( serialNumber $ supportedConnections ) X-NDS_NAME 'AFP Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.27 NAME 'messagingServer' SUP Server STRUCTURAL MAY ( messagingDatabaseLocation $ messageRoutingGroup $ Postmaster $ supportedServices $ messagingServerType $ supportedGateway ) X-NDS_NAME 'Messaging Server' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '1#subtree#[Self]#[Entry Rights]' '2#subtree#[Self]#[All Attributes Rights]' '6#entry#[Self]#status' '2#entry#[Public]#messagingServerType' '2#entry#[Public]#messagingDatabaseLocation') )",
+ "( 2.16.840.1.113719.1.1.6.1.28 NAME 'messageRoutingGroup' SUP groupOfNames STRUCTURAL X-NDS_NAME 'Message Routing Group' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES ( '1#subtree#[Self]#[Entry Rights]' '2#subtree#[Self]#[All Attributes Rights]') )",
+ "( 2.16.840.1.113719.1.1.6.1.29 NAME 'externalEntity' SUP Top STRUCTURAL MUST cn MAY ( description $ seeAlso $ facsimileTelephoneNumber $ l $ eMailAddress $ ou $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ st $ street $ title $ externalName $ mailboxLocation $ mailboxID ) X-NDS_NAMING ( 'cn' 'ou' ) X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'External Entity' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#entry#[Public]#externalName' )",
+ "( 2.16.840.1.113719.1.1.6.1.30 NAME 'List' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ ou $ o $ eMailAddress $ mailboxLocation $ mailboxID $ owner $ seeAlso $ fullName ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' X-NDS_ACL_TEMPLATES '2#entry#[Root Template]#member' )",
+ "( 2.16.840.1.113719.1.1.6.1.32 NAME 'treeRoot' SUP Top STRUCTURAL MUST T MAY ( EBATreeConfiguration $ sssActiveServerList ) X-NDS_NAMING 'T' X-NDS_NAME 'Tree Root' X-NDS_NONREMOVABLE '1' )",
+ "( 0.9.2342.19200300.100.4.13 NAME 'domain' SUP ( Top $ ndsLoginProperties $ ndsContainerLoginProperties ) STRUCTURAL MUST dc MAY ( searchGuide $ o $ seeAlso $ businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ l $ associatedName $ description $ sssActiveServerList $ sssServerPolicyOverrideDN $ userPassword ) X-NDS_NAMING 'dc' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NONREMOVABLE '1' )",
+ "( 1.3.6.1.4.1.1466.344 NAME 'dcObject' AUXILIARY MUST dc X-NDS_NAMING 'dc' X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.33 NAME 'ndsLoginProperties' SUP Top ABSTRACT MAY ( groupMembership $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginGraceRemaining $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginScript $ loginTime $ networkAddressRestriction $ networkAddress $ passwordsUsed $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ privateKey $ Profile $ publicKey $ securityEquals $ accountBalance $ allowUnlimitedCredit $ minimumAccountBalance $ Language $ lockedByIntruder $ serverHolds $ lastLoginTime $ higherPrivileges $ securityFlags $ profileMembership $ Timezone $ loginActivationTime $ UTF8LoginScript $ loginScriptCharset $ sASNDSPasswordWindow $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasAllowableSubjectNames $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPCounter $ sasOTPDigits $ sasOTPReSync $ sasUpdateLoginTimeInterval $ ndapPasswordMgmt $ ndapLoginMgmt $ nspmPasswordKey $ nspmPassword $ pwdChangedTime $ pwdAccountLockedTime $ pwdFailureTime $ nspmDoNotExpirePassword $ nspmDistributionPassword $ nspmPreviousDistributionPassword $ nspmPasswordHistory $ nspmAdministratorChangeCount $ nspmPasswordPolicyDN $ nsimHint $ nsimPasswordReminder $ userPassword ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.141.6.1 NAME 'federationBoundary' AUXILIARY MUST federationBoundaryType MAY ( federationControl $ federationDNSName $ federationSearchPath ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.34 NAME 'ndsContainerLoginProperties' SUP Top ABSTRACT MAY ( loginIntruderLimit $ intruderAttemptResetInterval $ detectIntruder $ lockoutAfterDetection $ intruderLockoutResetInterval $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPDigits $ sasUpdateLoginTimeInterval $ ndapPasswordMgmt $ ndapLoginMgmt $ nspmPasswordPolicyDN ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.27.6.3 NAME 'ndsPredicateStats' SUP Top STRUCTURAL MUST ( cn $ ndsPredicateState $ ndsPredicateFlush ) MAY ( ndsPredicate $ ndsPredicateTimeout $ ndsPredicateUseValues ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.400.1 NAME 'edirSchemaVersion' SUP Top ABSTRACT MAY edirSchemaFlagVersion X-NDS_NOT_CONTAINER '1' X-NDS_NONREMOVABLE '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.47 NAME 'immediateSuperiorReference' AUXILIARY MAY ref X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.27.6.1 NAME 'ldapServer' SUP Top STRUCTURAL MUST cn MAY ( ldapHostServer $ ldapGroupDN $ ldapTraceLevel $ ldapServerBindLimit $ ldapServerIdleTimeout $ lDAPUDPPort $ lDAPSearchSizeLimit $ lDAPSearchTimeLimit $ lDAPLogLevel $ lDAPLogFilename $ lDAPBackupLogFilename $ lDAPLogSizeLimit $ Version $ searchSizeLimit $ searchTimeLimit $ ldapEnableTCP $ ldapTCPPort $ ldapEnableSSL $ ldapSSLPort $ ldapKeyMaterialName $ filteredReplicaUsage $ extensionInfo $ nonStdClientSchemaCompatMode $ sslEnableMutualAuthentication $ ldapEnablePSearch $ ldapMaximumPSearchOperations $ ldapIgnorePSearchLimitsForEvents $ ldapTLSTrustedRootContainer $ ldapEnableMonitorEvents $ ldapMaximumMonitorEventsLoad $ ldapTLSRequired $ ldapTLSVerifyClientCertificate $ ldapConfigVersion $ ldapDerefAlias $ ldapNonStdAllUserAttrsMode $ ldapBindRestrictions $ ldapDefaultReferralBehavior $ ldapReferral $ ldapSearchReferralUsage $ lDAPOtherReferralUsage $ ldapLBURPNumWriterThreads $ ldapInterfaces $ ldapChainSecureRequired $ ldapStdCompliance $ ldapDerefAliasOnAuth $ ldapGeneralizedTime $ ldapPermissiveModify $ ldapSSLConfig ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) X-NDS_NAME 'LDAP Server' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.27.6.2 NAME 'ldapGroup' SUP Top STRUCTURAL MUST cn MAY ( ldapReferral $ ldapServerList $ ldapAllowClearTextPassword $ ldapAnonymousIdentity $ lDAPSuffix $ ldapAttributeMap $ ldapClassMap $ ldapSearchReferralUsage $ lDAPOtherReferralUsage $ transitionGroupDN $ ldapAttributeList $ ldapClassList $ ldapConfigVersion $ Version $ ldapDefaultReferralBehavior $ ldapTransitionBackLink $ ldapSSLConfig $ referralIncludeFilter $ referralExcludeFilter ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) X-NDS_NAME 'LDAP Group' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.5.6.22 NAME 'pkiCA' AUXILIARY MAY ( cACertificate $ certificateRevocationList $ authorityRevocationList $ crossCertificatePair $ attributeCertificate $ publicKey $ privateKey $ networkAddress $ loginTime $ lastLoginTime $ cAECCertificate $ crossCertificatePairEC ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.5.6.21 NAME 'pkiUser' AUXILIARY MAY userCertificate X-NDS_NOT_CONTAINER '1' )",
+ "( 2.5.6.15 NAME 'strongAuthenticationUser' AUXILIARY MAY userCertificate X-NDS_NOT_CONTAINER '1' )",
+ "( 2.5.6.11 NAME 'applicationProcess' SUP Top STRUCTURAL MUST cn MAY ( seeAlso $ ou $ l $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
+ "( 2.5.6.12 NAME 'applicationEntity' SUP Top STRUCTURAL MUST ( presentationAddress $ cn ) MAY ( supportedApplicationContext $ seeAlso $ ou $ o $ l $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
+ "( 2.5.6.13 NAME 'dSA' SUP applicationEntity STRUCTURAL MAY knowledgeInformation X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
+ "( 2.5.6.16 NAME 'certificationAuthority' AUXILIARY MUST ( authorityRevocationList $ certificateRevocationList $ cACertificate ) MAY crossCertificatePair X-NDS_NOT_CONTAINER '1' )",
+ "( 2.5.6.18 NAME 'userSecurityInformation' AUXILIARY MAY supportedAlgorithms X-NDS_NOT_CONTAINER '1' )",
+ "( 2.5.6.20 NAME 'dmd' SUP ndsLoginProperties AUXILIARY MUST dmdName MAY ( searchGuide $ seeAlso $ businessCategory $ x121Address $ registeredAddress $ destinationIndicator $ preferredDeliveryMethod $ telexNumber $ teletexTerminalIdentifier $ telephoneNumber $ internationaliSDNNumber $ facsimileTelephoneNumber $ street $ postOfficeBox $ postalCode $ postalAddress $ physicalDeliveryOfficeName $ l $ description $ userPassword ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.5.6.16.2 NAME 'certificationAuthority-V2' AUXILIARY MUST ( authorityRevocationList $ certificateRevocationList $ cACertificate ) MAY ( crossCertificatePair $ deltaRevocationList ) X-NDS_NAME 'certificationAuthorityVer2' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.3.6.1 NAME 'httpServer' SUP Top STRUCTURAL MUST cn MAY ( httpHostServerDN $ httpThreadsPerCPU $ httpIOBufferSize $ httpRequestTimeout $ httpKeepAliveRequestTimeout $ httpSessionTimeout $ httpKeyMaterialObject $ httpTraceLevel $ httpAuthRequiresTLS $ httpDefaultClearPort $ httpDefaultTLSPort $ httpBindRestrictions ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'domain' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.64.6.1.1 NAME 'Template' SUP Top STRUCTURAL MUST cn MAY ( trusteesOfNewObject $ newObjectSDSRights $ newObjectSFSRights $ setupScript $ runSetupScript $ membersOfTemplate $ volumeSpaceRestrictions $ setPasswordAfterCreate $ homeDirectoryRights $ accountBalance $ allowUnlimitedCredit $ description $ eMailAddress $ facsimileTelephoneNumber $ groupMembership $ higherPrivileges $ ndsHomeDirectory $ l $ Language $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginGraceLimit $ loginMaximumSimultaneous $ loginScript $ mailboxID $ mailboxLocation $ member $ messageServer $ minimumAccountBalance $ networkAddressRestriction $ newObjectSSelfRights $ ou $ passwordAllowChange $ passwordExpirationInterval $ passwordExpirationTime $ passwordMinimumLength $ passwordRequired $ passwordUniqueRequired $ physicalDeliveryOfficeName $ postalAddress $ postalCode $ postOfficeBox $ Profile $ st $ street $ securityEquals $ securityFlags $ seeAlso $ telephoneNumber $ title $ assistant $ assistantPhone $ city $ company $ co $ manager $ managerWorkforceID $ mailstop $ siteLocation $ employeeType $ costCenter $ costCenterDescription $ tollFreePhoneNumber $ departmentNumber ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.8.6.1 NAME 'homeInfo' AUXILIARY MAY ( homeCity $ homeEmailAddress $ homeFax $ homePhone $ homeState $ homePostalAddress $ homeZipCode $ personalMobile $ spouse $ children ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.8.6.2 NAME 'contingentWorker' AUXILIARY MAY ( vendorName $ vendorAddress $ vendorPhoneNumber ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.45 NAME 'dynamicGroup' SUP ( groupOfNames $ ndsLoginProperties ) STRUCTURAL MAY ( memberQueryURL $ excludedMember $ dgIdentity $ dgAllowUnknown $ dgTimeOut $ dgAllowDuplicates $ userPassword ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.46 NAME 'dynamicGroupAux' SUP ( groupOfNames $ ndsLoginProperties ) AUXILIARY MAY ( memberQueryURL $ excludedMember $ dgIdentity $ dgAllowUnknown $ dgTimeOut $ dgAllowDuplicates $ userPassword ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.39.6.1.1 NAME 'sASSecurity' SUP Top STRUCTURAL MUST cn MAY ( nDSPKITreeCADN $ masvPolicyDN $ sASLoginPolicyDN $ sASLoginMethodContainerDN $ sasPostLoginMethodContainerDN $ nspmPolicyAgentContainerDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Top' 'treeRoot' 'Country' 'Organization' 'domain' ) X-NDS_NAME 'SAS:Security' )",
+ "( 2.16.840.1.113719.1.39.6.1.2 NAME 'sASService' SUP Resource STRUCTURAL MAY ( hostServer $ privateKey $ publicKey $ allowUnlimitedCredit $ fullName $ lastLoginTime $ lockedByIntruder $ loginAllowedTimeMap $ loginDisabled $ loginExpirationTime $ loginIntruderAddress $ loginIntruderAttempts $ loginIntruderResetTime $ loginMaximumSimultaneous $ loginTime $ networkAddress $ networkAddressRestriction $ notify $ operator $ owner $ path $ securityEquals $ securityFlags $ status $ Version $ nDSPKIKeyMaterialDN $ ndspkiKMOExport ) X-NDS_NAMING 'cn' X-NDS_NAME 'SAS:Service' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.48.6.1.1 NAME 'nDSPKICertificateAuthority' SUP Top STRUCTURAL MUST cn MAY ( hostServer $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKICertificateChainEC $ nDSPKIParentCA $ nDSPKIParentCADN $ nDSPKISubjectName $ nDSPKIPublicKeyEC $ nDSPKIPrivateKeyEC $ nDSPKIPublicKeyCertificateEC $ crossCertificatePairEC $ nDSPKISuiteBMode $ cACertificate $ cAECCertificate $ ndspkiCRLContainerDN $ ndspkiIssuedCertContainerDN $ ndspkiCRLConfigurationDNList $ ndspkiCRLECConfigurationDNList $ ndspkiSecurityRightsLevel $ ndspkiDefaultRSAKeySize $ ndspkiDefaultECCurve $ ndspkiDefaultCertificateLife ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'NDSPKI:Certificate Authority' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.48.6.1.2 NAME 'nDSPKIKeyMaterial' SUP Top STRUCTURAL MUST cn MAY ( hostServer $ nDSPKIKeyFile $ nDSPKIPrivateKey $ nDSPKIPublicKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKISubjectName $ nDSPKIGivenName $ ndspkiAdditionalRoots $ nDSPKINotBefore $ nDSPKINotAfter ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'domain' ) X-NDS_NAME 'NDSPKI:Key Material' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.48.6.1.3 NAME 'nDSPKITrustedRoot' SUP Top STRUCTURAL MUST cn MAY ndspkiTrustedRootList X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'domain' ) X-NDS_NAME 'NDSPKI:Trusted Root' )",
+ "( 2.16.840.1.113719.1.48.6.1.4 NAME 'nDSPKITrustedRootObject' SUP Top STRUCTURAL MUST ( cn $ nDSPKITrustedRootCertificate ) MAY ( nDSPKISubjectName $ nDSPKINotBefore $ nDSPKINotAfter $ externalName $ givenName $ sn ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nDSPKITrustedRoot' X-NDS_NAME 'NDSPKI:Trusted Root Object' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.48.6.1.101 NAME 'nDSPKISDKeyAccessPartition' SUP Top STRUCTURAL MUST cn X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'NDSPKI:SD Key Access Partition' )",
+ "( 2.16.840.1.113719.1.48.6.1.102 NAME 'nDSPKISDKeyList' SUP Top STRUCTURAL MUST cn MAY ( nDSPKISDKeyServerDN $ nDSPKISDKeyStruct $ nDSPKISDKeyCert ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nDSPKISDKeyAccessPartition' X-NDS_NAME 'NDSPKI:SD Key List' )",
+ "( 2.16.840.1.113719.1.31.6.2.1 NAME 'mASVSecurityPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ masvDomainPolicy $ masvPolicyUpdate $ masvClearanceNames $ masvLabelNames $ masvLabelSecrecyLevelNames $ masvLabelSecrecyCategoryNames $ masvLabelIntegrityLevelNames $ masvLabelIntegrityCategoryNames $ masvNDSAttributeLabels ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'MASV:Security Policy' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.39.42.2.0.1 NAME 'sASLoginMethodContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'SAS:Login Method Container' )",
+ "( 2.16.840.1.113719.1.39.42.2.0.4 NAME 'sASLoginPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ privateKey $ publicKey $ sASAllowNDSPasswordWindow $ sASPolicyCredentials $ sASPolicyMethods $ sASPolicyObjectVersion $ sASPolicyServiceSubtypes $ sASPolicyServices $ sASPolicyUsers $ sASLoginSequence $ sASLoginPolicyUpdate $ sasNMASProductOptions $ sasPolicyMethods $ sasPolicyServices $ sasPolicyUsers $ sasAllowNDSPasswordWindow $ sasLoginFailureDelay $ sasDefaultLoginSequence $ sasAuthorizedLoginSequences $ sasAuditConfiguration $ sasUpdateLoginInfo $ sasOTPEnabled $ sasOTPLookAheadWindow $ sasOTPDigits $ sasUpdateLoginTimeInterval $ nspmPasswordPolicyDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' X-NDS_NAME 'SAS:Login Policy' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.39.42.2.0.7 NAME 'sASNMASBaseLoginMethod' SUP Top ABSTRACT MUST cn MAY ( description $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sASMethodIdentifier $ sASMethodVendor $ sASVendorSupport $ sASAdvisoryMethodGrade $ sASLoginClientMethodNetWare $ sASLoginServerMethodNetWare $ sASLoginClientMethodWINNT $ sASLoginServerMethodWINNT $ sasCertificateSearchContainers $ sasNMASMethodConfigData $ sasMethodVersion $ sASLoginPolicyUpdate $ sasUnsignedMethodModules $ sasServerModuleName $ sasServerModuleEntryPointName $ sasSASLMechanismName $ sasSASLMechanismEntryPointName $ sasClientModuleName $ sasClientModuleEntryPointName $ sasLoginClientMethodSolaris $ sasLoginServerMethodSolaris $ sasLoginClientMethodLinux $ sasLoginServerMethodLinux $ sasLoginClientMethodTru64 $ sasLoginServerMethodTru64 $ sasLoginClientMethodAIX $ sasLoginServerMethodAIX $ sasLoginClientMethodHPUX $ sasLoginServerMethodHPUX $ sasLoginClientMethods390 $ sasLoginServerMethods390 $ sasLoginClientMethodLinuxX64 $ sasLoginServerMethodLinuxX64 $ sasLoginClientMethodWinX64 $ sasLoginServerMethodWinX64 $ sasLoginClientMethodSolaris64 $ sasLoginServerMethodSolaris64 $ sasLoginClientMethodSolarisi386 $ sasLoginServerMethodSolarisi386 $ sasLoginClientMethodAIX64 $ sasLoginServerMethodAIX64 ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASLoginMethodContainer' X-NDS_NAME 'SAS:NMAS Base Login Method' )",
+ "( 2.16.840.1.113719.1.39.42.2.0.8 NAME 'sASNMASLoginMethod' SUP sASNMASBaseLoginMethod STRUCTURAL X-NDS_NAME 'SAS:NMAS Login Method' )",
+ "( 2.16.840.1.113719.1.39.42.2.0.9 NAME 'rADIUSDialAccessSystem' SUP Top STRUCTURAL MUST cn MAY ( publicKey $ privateKey $ rADIUSAgedInterval $ rADIUSClient $ rADIUSCommonNameResolution $ rADIUSConcurrentLimit $ rADIUSDASVersion $ rADIUSEnableCommonNameLogin $ rADIUSEnableDialAccess $ rADIUSInterimAcctingTimeout $ rADIUSLookupContexts $ rADIUSMaxDASHistoryRecord $ rADIUSMaximumHistoryRecord $ rADIUSPasswordPolicy $ rADIUSPrivateKey $ rADIUSProxyContext $ rADIUSProxyDomain $ rADIUSProxyTarget $ rADIUSPublicKey $ sASLoginConfiguration $ sASLoginConfigurationKey ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'RADIUS:Dial Access System' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.39.42.2.0.10 NAME 'rADIUSProfile' SUP Top STRUCTURAL MUST cn MAY rADIUSAttributeList X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NAME 'RADIUS:Profile' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.39.42.2.0.11 NAME 'sasPostLoginMethodContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )",
+ "( 2.16.840.1.113719.1.39.42.2.0.12 NAME 'sasPostLoginMethod' SUP Top STRUCTURAL MUST cn MAY ( description $ sASLoginSecret $ sASLoginSecretKey $ sASEncryptionType $ sASLoginConfiguration $ sASLoginConfigurationKey $ sASMethodIdentifier $ sASMethodVendor $ sASVendorSupport $ sASAdvisoryMethodGrade $ sASLoginClientMethodNetWare $ sASLoginServerMethodNetWare $ sASLoginClientMethodWINNT $ sASLoginServerMethodWINNT $ sasMethodVersion $ sASLoginPolicyUpdate $ sasUnsignedMethodModules $ sasServerModuleName $ sasServerModuleEntryPointName $ sasSASLMechanismName $ sasSASLMechanismEntryPointName $ sasClientModuleName $ sasClientModuleEntryPointName $ sasLoginClientMethodSolaris $ sasLoginServerMethodSolaris $ sasLoginClientMethodLinux $ sasLoginServerMethodLinux $ sasLoginClientMethodTru64 $ sasLoginServerMethodTru64 $ sasLoginClientMethodAIX $ sasLoginServerMethodAIX $ sasLoginClientMethodHPUX $ sasLoginServerMethodHPUX $ sasLoginClientMethods390 $ sasLoginServerMethods390 $ sasLoginClientMethodLinuxX64 $ sasLoginServerMethodLinuxX64 $ sasLoginClientMethodWinX64 $ sasLoginServerMethodWinX64 $ sasLoginClientMethodSolaris64 $ sasLoginServerMethodSolaris64 $ sasLoginClientMethodSolarisi386 $ sasLoginServerMethodSolarisi386 $ sasLoginClientMethodAIX64 $ sasLoginServerMethodAIX64 ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sasPostLoginMethodContainer' )",
+ "( 2.16.840.1.113719.1.6.6.1 NAME 'snmpGroup' SUP Top STRUCTURAL MUST cn MAY ( Version $ snmpServerList $ snmpTrapDisable $ snmpTrapInterval $ snmpTrapDescription $ snmpTrapConfig ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'domain' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.39.43.6.2 NAME 'nspmPasswordPolicyContainer' SUP Top STRUCTURAL MUST cn MAY description X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Country' 'domain' 'Locality' 'Organization' 'organizationalUnit' ) )",
+ "( 2.16.840.1.113719.1.39.43.6.3 NAME 'nspmPolicyAgent' SUP Top STRUCTURAL MUST cn MAY ( description $ nspmPolicyAgentNetWare $ nspmPolicyAgentWINNT $ nspmPolicyAgentSolaris $ nspmPolicyAgentLinux $ nspmPolicyAgentAIX $ nspmPolicyAgentHPUX ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'nspmPasswordPolicyContainer' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.39.43.6.1 NAME 'nspmPasswordPolicy' SUP Top STRUCTURAL MUST cn MAY ( description $ nspmPolicyPrecedence $ nspmConfigurationOptions $ nspmChangePasswordMessage $ passwordExpirationInterval $ loginGraceLimit $ nspmMinPasswordLifetime $ passwordUniqueRequired $ nspmPasswordHistoryLimit $ nspmPasswordHistoryExpiration $ passwordAllowChange $ passwordRequired $ passwordMinimumLength $ nspmMaximumLength $ nspmCaseSensitive $ nspmMinUpperCaseCharacters $ nspmMaxUpperCaseCharacters $ nspmMinLowerCaseCharacters $ nspmMaxLowerCaseCharacters $ nspmNumericCharactersAllowed $ nspmNumericAsFirstCharacter $ nspmNumericAsLastCharacter $ nspmMinNumericCharacters $ nspmMaxNumericCharacters $ nspmSpecialCharactersAllowed $ nspmSpecialAsFirstCharacter $ nspmSpecialAsLastCharacter $ nspmMinSpecialCharacters $ nspmMaxSpecialCharacters $ nspmMaxRepeatedCharacters $ nspmMaxConsecutiveCharacters $ nspmMinUniqueCharacters $ nspmDisallowedAttributeValues $ nspmExcludeList $ nspmExtendedCharactersAllowed $ nspmExtendedAsFirstCharacter $ nspmExtendedAsLastCharacter $ nspmMinExtendedCharacters $ nspmMaxExtendedCharacters $ nspmUpperAsFirstCharacter $ nspmUpperAsLastCharacter $ nspmLowerAsFirstCharacter $ nspmLowerAsLastCharacter $ nspmComplexityRules $ nspmAD2K8Syntax $ nspmAD2K8maxViolation $ nspmXCharLimit $ nspmXCharHistoryLimit $ nspmUnicodeAllowed $ nspmNonAlphaCharactersAllowed $ nspmMinNonAlphaCharacters $ nspmMaxNonAlphaCharacters $ pwdInHistory $ nspmAdminsDoNotExpirePassword $ nspmPasswordACL $ nsimChallengeSetDN $ nsimForgottenAction $ nsimForgottenLoginConfig $ nsimAssignments $ nsimChallengeSetGUID $ nsimPwdRuleEnforcement ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'nspmPasswordPolicyContainer' 'domain' 'Locality' 'Organization' 'organizationalUnit' 'Country' ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.39.43.6.4 NAME 'nspmPasswordAux' AUXILIARY MAY ( publicKey $ privateKey $ loginGraceLimit $ loginGraceRemaining $ passwordExpirationTime $ passwordRequired $ nspmPasswordKey $ nspmPassword $ nspmDistributionPassword $ nspmPreviousDistributionPassword $ nspmPasswordHistory $ nspmAdministratorChangeCount $ nspmPasswordPolicyDN $ pwdChangedTime $ pwdAccountLockedTime $ pwdFailureTime $ nspmDoNotExpirePassword ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.12.6.1.0 NAME 'auditFileObject' SUP Top STRUCTURAL MUST ( cn $ auditPolicy $ auditContents ) MAY ( description $ auditPath $ auditLinkList $ auditType $ auditCurrentEncryptionKey $ auditAEncryptionKey $ auditBEncryptionKey ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Top' 'Country' 'Locality' 'Organization' 'organizationalUnit' 'treeRoot' 'domain' ) X-NDS_NAME 'Audit:File Object' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.38.6.1.4 NAME 'wANMANLANArea' SUP Top STRUCTURAL MUST cn MAY ( description $ l $ member $ o $ ou $ owner $ seeAlso $ wANMANWANPolicy $ wANMANCost $ wANMANDefaultCost ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'Organization' 'organizationalUnit' ) X-NDS_NAME 'WANMAN:LAN Area' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.135.6.37.1 NAME 'rbsCollection' SUP Top STRUCTURAL MUST cn MAY ( owner $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
+ "( 2.16.840.1.113719.1.135.6.30.1 NAME 'rbsExternalScope' SUP Top ABSTRACT MUST cn MAY ( rbsURL $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.135.6.31.1 NAME 'rbsModule' SUP Top STRUCTURAL MUST cn MAY ( rbsURL $ rbsPath $ rbsType $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' )",
+ "( 2.16.840.1.113719.1.135.6.32.1 NAME 'rbsRole' SUP Top STRUCTURAL MUST cn MAY ( rbsContent $ rbsMember $ rbsTrusteeOf $ rbsGALabel $ rbsParameters $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection' )",
+ "( 2.16.840.1.113719.1.135.6.33.1 NAME 'rbsTask' SUP Top STRUCTURAL MUST cn MAY ( rbsContentMembership $ rbsType $ rbsTaskRights $ rbsEntryPoint $ rbsParameters $ rbsTaskTemplates $ rbsTaskTemplatesURL $ description $ rbsXMLInfo ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsModule' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.135.6.34.1 NAME 'rbsBook' SUP rbsTask STRUCTURAL MAY ( rbsTargetObjectType $ rbsPageMembership ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.135.6.35.1 NAME 'rbsScope' SUP groupOfNames STRUCTURAL MAY ( rbsContext $ rbsXMLInfo ) X-NDS_CONTAINMENT 'rbsRole' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.135.6.45.1 NAME 'rbsCollection2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsParameters $ owner $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'domain' ) )",
+ "( 2.16.840.1.113719.1.135.6.38.1 NAME 'rbsExternalScope2' SUP Top ABSTRACT MUST cn MAY ( rbsXMLInfo $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.135.6.39.1 NAME 'rbsModule2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsPath $ rbsType $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' )",
+ "( 2.16.840.1.113719.1.135.6.40.1 NAME 'rbsRole2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsContent $ rbsMember $ rbsTrusteeOf $ rbsParameters $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsCollection2' )",
+ "( 2.16.840.1.113719.1.135.6.41.1 NAME 'rbsTask2' SUP Top STRUCTURAL MUST cn MAY ( rbsXMLInfo $ rbsContentMembership $ rbsType $ rbsTaskRights $ rbsEntryPoint $ rbsParameters $ description ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'rbsModule2' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.135.6.42.1 NAME 'rbsBook2' SUP rbsTask2 STRUCTURAL MAY ( rbsTargetObjectType $ rbsPageMembership ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.135.6.43.1 NAME 'rbsScope2' SUP groupOfNames STRUCTURAL MAY ( rbsContext $ rbsXMLInfo ) X-NDS_CONTAINMENT 'rbsRole2' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.49 NAME 'prSyncPolicy' SUP Top STRUCTURAL MUST cn MAY prSyncAttributes X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'domain' 'Country' 'Locality' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.50 NAME 'encryptionPolicy' SUP Top STRUCTURAL MUST cn MAY ( attrEncryptionDefinition $ attrEncryptionRequiresSecure ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'domain' 'organizationalUnit' 'Organization' ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.48.6.1.5 NAME 'ndspkiContainer' SUP Top STRUCTURAL MUST cn X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'ndspkiContainer' 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'nDSPKITrustedRoot' ) )",
+ "( 2.16.840.1.113719.1.48.6.1.6 NAME 'ndspkiCertificate' SUP Top STRUCTURAL MUST ( cn $ userCertificate ) MAY ( nDSPKISubjectName $ nDSPKINotBefore $ nDSPKINotAfter $ externalName $ givenName $ sn ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sASSecurity' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'ndspkiContainer' 'nDSPKITrustedRoot' ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.48.6.1.7 NAME 'ndspkiCRLConfiguration' SUP Top STRUCTURAL MUST cn MAY ( ndspkiCRLFileName $ ndspkiDirectory $ ndspkiStatus $ ndspkiIssueTime $ ndspkiNextIssueTime $ ndspkiAttemptTime $ ndspkiTimeInterval $ ndspkiCRLMaxProcessingInterval $ ndspkiCRLNumber $ ndspkiDistributionPoints $ ndspkiDistributionPointDN $ ndspkiCADN $ ndspkiCRLProcessData $ nDSPKIPublicKey $ nDSPKIPrivateKey $ nDSPKIPublicKeyCertificate $ nDSPKICertificateChain $ nDSPKIParentCA $ nDSPKIParentCADN $ nDSPKISubjectName $ cACertificate $ hostServer $ ndspkiCRLType $ ndspkiCRLExtendValidity ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'ndspkiContainer' )",
+ "( 2.5.6.19 NAME 'cRLDistributionPoint' SUP Top STRUCTURAL MUST cn MAY ( authorityRevocationList $ authorityRevocationList $ cACertificate $ certificateRevocationList $ certificateRevocationList $ crossCertificatePair $ deltaRevocationList $ deltaRevocationList $ ndspkiCRLConfigurationDN ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'Country' 'Locality' 'organizationalUnit' 'Organization' 'sASSecurity' 'domain' 'ndspkiCRLConfiguration' ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.7.6.1 NAME 'notfTemplateCollection' SUP Top STRUCTURAL MUST cn MAY ( notfSMTPEmailHost $ notfSMTPEmailFrom $ notfSMTPEmailUserName $ sASSecretStore ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )",
+ "( 2.16.840.1.113719.1.7.6.2 NAME 'notfMergeTemplate' SUP Top STRUCTURAL MUST cn MAY ( notfMergeTemplateData $ notfMergeTemplateSubject ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'notfTemplateCollection' X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.39.44.6.1 NAME 'nsimChallengeSet' SUP Top STRUCTURAL MUST cn MAY ( description $ nsimRequiredQuestions $ nsimRandomQuestions $ nsimNumberRandomQuestions $ nsimMinResponseLength $ nsimMaxResponseLength ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'nspmPasswordPolicyContainer' 'Country' 'domain' 'Locality' 'Organization' 'organizationalUnit' ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.266.6.1 NAME 'sssServerPolicies' SUP Top STRUCTURAL MUST cn MAY ( sssCacheRefreshInterval $ sssEnableReadTimestamps $ sssDisableMasterPasswords $ sssEnableAdminAccess $ sssAdminList $ sssAdminGALabel $ sssReadSecretPolicies ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT 'sASSecurity' )",
+ "( 2.16.840.1.113719.1.266.6.2 NAME 'sssServerPolicyOverride' SUP Top STRUCTURAL MUST cn MAY ( sssCacheRefreshInterval $ sssEnableReadTimestamps $ sssDisableMasterPasswords $ sssEnableAdminAccess $ sssAdminList $ sssAdminGALabel $ sssReadSecretPolicies ) X-NDS_NAMING 'cn' X-NDS_CONTAINMENT ( 'sssServerPolicies' 'Organization' 'organizationalUnit' 'Country' 'Locality' 'domain' ) X-NDS_NOT_CONTAINER '1' )",
+ "( 2.16.840.1.113719.1.1.6.1.91 NAME 'nestedGroupAux' AUXILIARY MAY ( groupMember $ excludedMember $ nestedConfig $ groupMembership ) X-NDS_NOT_CONTAINER '1' )"
+ ]
+ },
+ "schema_entry": "cn=schema",
+ "type": "SchemaInfo"
+}
+"""
+
+edir_9_1_4_dsa_info = """
+{
+ "raw": {
+ "abandonOps": [
+ "0"
+ ],
+ "addEntryOps": [
+ "0"
+ ],
+ "altServer": [],
+ "bindSecurityErrors": [
+ "0"
+ ],
+ "chainings": [
+ "0"
+ ],
+ "compareOps": [
+ "0"
+ ],
+ "directoryTreeName": [
+ "TEST_TREE"
+ ],
+ "dsaName": [
+ "cn=MYSERVER,o=resources"
+ ],
+ "errors": [
+ "0"
+ ],
+ "extendedOps": [
+ "0"
+ ],
+ "inBytes": [
+ "293"
+ ],
+ "inOps": [
+ "3"
+ ],
+ "listOps": [
+ "0"
+ ],
+ "modifyEntryOps": [
+ "0"
+ ],
+ "modifyRDNOps": [
+ "0"
+ ],
+ "namingContexts": [
+ ""
+ ],
+ "oneLevelSearchOps": [
+ "0"
+ ],
+ "outBytes": [
+ "14"
+ ],
+ "readOps": [
+ "1"
+ ],
+ "referralsReturned": [
+ "0"
+ ],
+ "removeEntryOps": [
+ "0"
+ ],
+ "repUpdatesIn": [
+ "0"
+ ],
+ "repUpdatesOut": [
+ "0"
+ ],
+ "searchOps": [
+ "1"
+ ],
+ "securityErrors": [
+ "0"
+ ],
+ "simpleAuthBinds": [
+ "1"
+ ],
+ "strongAuthBinds": [
+ "0"
+ ],
+ "subschemaSubentry": [
+ "cn=schema"
+ ],
+ "supportedCapabilities": [],
+ "supportedControl": [
+ "2.16.840.1.113719.1.27.101.6",
+ "2.16.840.1.113719.1.27.101.5",
+ "1.2.840.113556.1.4.319",
+ "2.16.840.1.113730.3.4.3",
+ "2.16.840.1.113730.3.4.2",
+ "2.16.840.1.113719.1.27.101.57",
+ "2.16.840.1.113719.1.27.103.7",
+ "2.16.840.1.113719.1.27.101.40",
+ "2.16.840.1.113719.1.27.101.41",
+ "1.2.840.113556.1.4.1413",
+ "1.2.840.113556.1.4.805",
+ "2.16.840.1.113730.3.4.18",
+ "1.2.840.113556.1.4.529"
+ ],
+ "supportedExtension": [
+ "2.16.840.1.113719.1.148.100.1",
+ "2.16.840.1.113719.1.148.100.3",
+ "2.16.840.1.113719.1.148.100.5",
+ "2.16.840.1.113719.1.148.100.7",
+ "2.16.840.1.113719.1.148.100.9",
+ "2.16.840.1.113719.1.148.100.11",
+ "2.16.840.1.113719.1.148.100.13",
+ "2.16.840.1.113719.1.148.100.15",
+ "2.16.840.1.113719.1.148.100.17",
+ "2.16.840.1.113719.1.39.42.100.1",
+ "2.16.840.1.113719.1.39.42.100.3",
+ "2.16.840.1.113719.1.39.42.100.5",
+ "2.16.840.1.113719.1.39.42.100.7",
+ "2.16.840.1.113719.1.39.42.100.9",
+ "2.16.840.1.113719.1.39.42.100.11",
+ "2.16.840.1.113719.1.39.42.100.13",
+ "2.16.840.1.113719.1.39.42.100.15",
+ "2.16.840.1.113719.1.39.42.100.17",
+ "2.16.840.1.113719.1.39.42.100.19",
+ "2.16.840.1.113719.1.39.42.100.21",
+ "2.16.840.1.113719.1.39.42.100.23",
+ "2.16.840.1.113719.1.39.42.100.25",
+ "2.16.840.1.113719.1.39.42.100.27",
+ "2.16.840.1.113719.1.39.42.100.29",
+ "1.3.6.1.4.1.4203.1.11.1",
+ "2.16.840.1.113719.1.27.100.1",
+ "2.16.840.1.113719.1.27.100.3",
+ "2.16.840.1.113719.1.27.100.5",
+ "2.16.840.1.113719.1.27.100.7",
+ "2.16.840.1.113719.1.27.100.11",
+ "2.16.840.1.113719.1.27.100.13",
+ "2.16.840.1.113719.1.27.100.15",
+ "2.16.840.1.113719.1.27.100.17",
+ "2.16.840.1.113719.1.27.100.19",
+ "2.16.840.1.113719.1.27.100.21",
+ "2.16.840.1.113719.1.27.100.23",
+ "2.16.840.1.113719.1.27.100.25",
+ "2.16.840.1.113719.1.27.100.27",
+ "2.16.840.1.113719.1.27.100.29",
+ "2.16.840.1.113719.1.27.100.31",
+ "2.16.840.1.113719.1.27.100.33",
+ "2.16.840.1.113719.1.27.100.35",
+ "2.16.840.1.113719.1.27.100.37",
+ "2.16.840.1.113719.1.27.100.39",
+ "2.16.840.1.113719.1.27.100.41",
+ "2.16.840.1.113719.1.27.100.96",
+ "2.16.840.1.113719.1.27.100.98",
+ "2.16.840.1.113719.1.27.100.101",
+ "2.16.840.1.113719.1.27.100.103",
+ "2.16.840.1.113719.1.142.100.1",
+ "2.16.840.1.113719.1.142.100.4",
+ "2.16.840.1.113719.1.142.100.6",
+ "2.16.840.1.113719.1.27.100.9",
+ "2.16.840.1.113719.1.27.100.43",
+ "2.16.840.1.113719.1.27.100.45",
+ "2.16.840.1.113719.1.27.100.47",
+ "2.16.840.1.113719.1.27.100.49",
+ "2.16.840.1.113719.1.27.100.51",
+ "2.16.840.1.113719.1.27.100.53",
+ "2.16.840.1.113719.1.27.100.55",
+ "1.3.6.1.4.1.1466.20037",
+ "2.16.840.1.113719.1.27.100.79",
+ "2.16.840.1.113719.1.27.100.84",
+ "2.16.840.1.113719.1.27.103.1",
+ "2.16.840.1.113719.1.27.103.2"
+ ],
+ "supportedFeatures": [
+ "1.3.6.1.4.1.4203.1.5.1",
+ "2.16.840.1.113719.1.27.99.1"
+ ],
+ "supportedGroupingTypes": [
+ "2.16.840.1.113719.1.27.103.8"
+ ],
+ "supportedLDAPVersion": [
+ "2",
+ "3"
+ ],
+ "supportedSASLMechanisms": [
+ "NMAS_LOGIN"
+ ],
+ "unAuthBinds": [
+ "0"
+ ],
+ "vendorName": [
+ "NetIQ Corporation"
+ ],
+ "vendorVersion": [
+ "LDAP Agent for NetIQ eDirectory 9.1.4 (40105.09)"
+ ],
+ "wholeSubtreeSearchOps": [
+ "0"
+ ]
+ },
+ "type": "DsaInfo"
+}
+""" \ No newline at end of file
diff --git a/ldap3/protocol/schemas/slapd24.py b/ldap3/protocol/schemas/slapd24.py
index 30e1795..1c66332 100644
--- a/ldap3/protocol/schemas/slapd24.py
+++ b/ldap3/protocol/schemas/slapd24.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/strategy/asyncStream.py b/ldap3/strategy/asyncStream.py
index 7977d7e..631331c 100644
--- a/ldap3/strategy/asyncStream.py
+++ b/ldap3/strategy/asyncStream.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -55,7 +55,11 @@ class AsyncStreamStrategy(AsyncStrategy):
self.persistent_search_message_id = None
self.streaming = False
self.callback = None
- self.events = Queue()
+ if ldap_connection.pool_size:
+ self.events = Queue(ldap_connection.pool_size)
+ else:
+ self.events = Queue()
+
del self._requests # remove _requests dict from Async Strategy
def _start_listen(self):
@@ -77,7 +81,6 @@ class AsyncStreamStrategy(AsyncStrategy):
if not self._header_added and self.stream.tell() == 0:
header = add_ldif_header(['-'])[0]
self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator))
-
ldif_lines = persistent_search_response_to_ldif(change)
if self.stream and ldif_lines and not self.connection.closed:
fragment = self.line_separator.join(ldif_lines)
diff --git a/ldap3/strategy/asynchronous.py b/ldap3/strategy/asynchronous.py
index 8ac79ee..b772ad2 100644
--- a/ldap3/strategy/asynchronous.py
+++ b/ldap3/strategy/asynchronous.py
@@ -1,221 +1,253 @@
-"""
-"""
-
-# Created on 2013.07.15
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2013 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from threading import Thread, Lock
-import socket
-
-from .. import get_config_parameter
-from ..core.exceptions import LDAPSSLConfigurationError, LDAPStartTLSError, LDAPOperationResult
-from ..strategy.base import BaseStrategy, RESPONSE_COMPLETE
-from ..protocol.rfc4511 import LDAPMessage
-from ..utils.log import log, log_enabled, format_ldap_message, ERROR, NETWORK, EXTENDED
-from ..utils.asn1 import decoder, decode_message_fast
-
-
-# noinspection PyProtectedMember
-class AsyncStrategy(BaseStrategy):
- """
- This strategy is asynchronous. You send the request and get the messageId of the request sent
- Receiving data from socket is managed in a separated thread in a blocking mode
- Requests return an int value to indicate the messageId of the requested Operation
- You get the response with get_response, it has a timeout to wait for response to appear
- Connection.response will contain the whole LDAP response for the messageId requested in a dict form
- Connection.request will contain the result LDAP message in a dict form
- Response appear in strategy._responses dictionary
- """
-
- # noinspection PyProtectedMember
- class ReceiverSocketThread(Thread):
- """
- The thread that actually manage the receiver socket
- """
-
- def __init__(self, ldap_connection):
- Thread.__init__(self)
- self.connection = ldap_connection
- self.socket_size = get_config_parameter('SOCKET_SIZE')
-
- def run(self):
- """
- Wait for data on socket, compute the length of the message and wait for enough bytes to decode the message
- Message are appended to strategy._responses
- """
- unprocessed = b''
- get_more_data = True
- listen = True
- data = b''
- while listen:
- if get_more_data:
- try:
- data = self.connection.socket.recv(self.socket_size)
- except (OSError, socket.error, AttributeError):
- if self.connection.receive_timeout: # a receive timeout has been detected - keep kistening on the socket
- continue
- except Exception as e:
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', str(e), self.connection)
- raise # unexpected exception - re-raise
- if len(data) > 0:
- unprocessed += data
- data = b''
- else:
- listen = False
- length = BaseStrategy.compute_ldap_message_size(unprocessed)
- if length == -1 or len(unprocessed) < length:
- get_more_data = True
- elif len(unprocessed) >= length: # add message to message list
- if self.connection.usage:
- self.connection._usage.update_received_message(length)
- if log_enabled(NETWORK):
- log(NETWORK, 'received %d bytes via <%s>', length, self.connection)
- if self.connection.fast_decoder:
- ldap_resp = decode_message_fast(unprocessed[:length])
- dict_response = self.connection.strategy.decode_response_fast(ldap_resp)
- else:
- ldap_resp = decoder.decode(unprocessed[:length], asn1Spec=LDAPMessage())[0]
- dict_response = self.connection.strategy.decode_response(ldap_resp)
- message_id = int(ldap_resp['messageID'])
- if log_enabled(NETWORK):
- log(NETWORK, 'received 1 ldap message via <%s>', self.connection)
- if log_enabled(EXTENDED):
- log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<'))
- if dict_response['type'] == 'extendedResp' and (dict_response['responseName'] == '1.3.6.1.4.1.1466.20037' or hasattr(self.connection, '_awaiting_for_async_start_tls')):
- if dict_response['result'] == 0: # StartTls in progress
- if self.connection.server.tls:
- self.connection.server.tls._start_tls(self.connection)
- else:
- self.connection.last_error = 'no Tls object defined in Server'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPSSLConfigurationError(self.connection.last_error)
- else:
- self.connection.last_error = 'asynchronous StartTls failed'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPStartTLSError(self.connection.last_error)
- del self.connection._awaiting_for_async_start_tls
- if message_id != 0: # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4)
- with self.connection.strategy.async_lock:
- if message_id in self.connection.strategy._responses:
- self.connection.strategy._responses[message_id].append(dict_response)
- else:
- self.connection.strategy._responses[message_id] = [dict_response]
- if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']:
- self.connection.strategy._responses[message_id].append(RESPONSE_COMPLETE)
- if self.connection.strategy.can_stream: # for AsyncStreamStrategy, used for PersistentSearch
- self.connection.strategy.accumulate_stream(message_id, dict_response)
- unprocessed = unprocessed[length:]
- get_more_data = False if unprocessed else True
- listen = True if self.connection.listening or unprocessed else False
- else: # Unsolicited Notification
- if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036': # Notice of Disconnection as per RFC4511 (paragraph 4.4.1)
- listen = False
- else:
- self.connection.last_error = 'unknown unsolicited notification from server'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPStartTLSError(self.connection.last_error)
- self.connection.strategy.close()
-
- def __init__(self, ldap_connection):
- BaseStrategy.__init__(self, ldap_connection)
- self.sync = False
- self.no_real_dsa = False
- self.pooled = False
- self._responses = None
- self._requests = None
- self.can_stream = False
- self.receiver = None
- self.async_lock = Lock()
-
- def open(self, reset_usage=True, read_server_info=True):
- """
- Open connection and start listen on the socket in a different thread
- """
- with self.connection.connection_lock:
- self._responses = dict()
- self._requests = dict()
- BaseStrategy.open(self, reset_usage, read_server_info)
-
- if read_server_info:
- try:
- self.connection.refresh_server_info()
- except LDAPOperationResult: # catch errors from server if raise_exception = True
- self.connection.server._dsa_info = None
- self.connection.server._schema_info = None
-
- def close(self):
- """
- Close connection and stop socket thread
- """
- with self.connection.connection_lock:
- BaseStrategy.close(self)
-
- def post_send_search(self, message_id):
- """
- Clears connection.response and returns messageId
- """
- self.connection.response = None
- self.connection.request = None
- self.connection.result = None
- return message_id
-
- def post_send_single_response(self, message_id):
- """
- Clears connection.response and returns messageId.
- """
- self.connection.response = None
- self.connection.request = None
- self.connection.result = None
- return message_id
-
- def _start_listen(self):
- """
- Start thread in daemon mode
- """
- if not self.connection.listening:
- self.receiver = AsyncStrategy.ReceiverSocketThread(self.connection)
- self.connection.listening = True
- self.receiver.daemon = True
- self.receiver.start()
-
- def _get_response(self, message_id):
- """
- Performs the capture of LDAP response for this strategy
- Checks lock to avoid race condition with receiver thread
- """
- with self.async_lock:
- responses = self._responses.pop(message_id) if message_id in self._responses and self._responses[message_id][-1] == RESPONSE_COMPLETE else None
-
- return responses
-
- def receiving(self):
- raise NotImplementedError
-
- def get_stream(self):
- raise NotImplementedError
-
- def set_stream(self, value):
- raise NotImplementedError
+"""
+"""
+
+# Created on 2013.07.15
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2013 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from threading import Thread, Lock, Event
+import socket
+
+from .. import get_config_parameter
+from ..core.exceptions import LDAPSSLConfigurationError, LDAPStartTLSError, LDAPOperationResult
+from ..strategy.base import BaseStrategy, RESPONSE_COMPLETE
+from ..protocol.rfc4511 import LDAPMessage
+from ..utils.log import log, log_enabled, format_ldap_message, ERROR, NETWORK, EXTENDED
+from ..utils.asn1 import decoder, decode_message_fast
+
+
+# noinspection PyProtectedMember
+class AsyncStrategy(BaseStrategy):
+ """
+    This strategy is asynchronous. You send the request and get the messageId of the request sent
+    Receiving data from the socket is managed in a separate thread in blocking mode
+    Requests return an int value to indicate the messageId of the requested Operation
+    You get the response with get_response, which has a timeout to wait for the response to appear
+    Connection.response will contain the whole LDAP response for the messageId requested, in dict form
+    Connection.request will contain the result LDAP message in dict form
+    Responses appear in the strategy._responses dictionary
+ """
+
+ # noinspection PyProtectedMember
+ class ReceiverSocketThread(Thread):
+ """
+ The thread that actually manage the receiver socket
+        The thread that actually manages the receiver socket
+
+ def __init__(self, ldap_connection):
+ Thread.__init__(self)
+ self.connection = ldap_connection
+ self.socket_size = get_config_parameter('SOCKET_SIZE')
+
+ def run(self):
+ """
+            Waits for data on the socket, computes the length of the message and waits for enough bytes to decode the message
+            Messages are appended to strategy._responses
+ """
+ unprocessed = b''
+ get_more_data = True
+ listen = True
+ data = b''
+ while listen:
+ if get_more_data:
+ try:
+ data = self.connection.socket.recv(self.socket_size)
+ except (OSError, socket.error, AttributeError):
+                        if self.connection.receive_timeout: # a receive timeout has been detected - keep listening on the socket
+ continue
+ except Exception as e:
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', str(e), self.connection)
+ raise # unexpected exception - re-raise
+ if len(data) > 0:
+ unprocessed += data
+ data = b''
+ else:
+ listen = False
+ length = BaseStrategy.compute_ldap_message_size(unprocessed)
+ if length == -1 or len(unprocessed) < length:
+ get_more_data = True
+ elif len(unprocessed) >= length: # add message to message list
+ if self.connection.usage:
+ self.connection._usage.update_received_message(length)
+ if log_enabled(NETWORK):
+ log(NETWORK, 'received %d bytes via <%s>', length, self.connection)
+ if self.connection.fast_decoder:
+ ldap_resp = decode_message_fast(unprocessed[:length])
+ dict_response = self.connection.strategy.decode_response_fast(ldap_resp)
+ else:
+ ldap_resp = decoder.decode(unprocessed[:length], asn1Spec=LDAPMessage())[0]
+ dict_response = self.connection.strategy.decode_response(ldap_resp)
+ message_id = int(ldap_resp['messageID'])
+ if log_enabled(NETWORK):
+ log(NETWORK, 'received 1 ldap message via <%s>', self.connection)
+ if log_enabled(EXTENDED):
+ log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<'))
+ if dict_response['type'] == 'extendedResp' and (dict_response['responseName'] == '1.3.6.1.4.1.1466.20037' or hasattr(self.connection, '_awaiting_for_async_start_tls')):
+ if dict_response['result'] == 0: # StartTls in progress
+ if self.connection.server.tls:
+ self.connection.server.tls._start_tls(self.connection)
+ else:
+ self.connection.last_error = 'no Tls object defined in Server'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPSSLConfigurationError(self.connection.last_error)
+ else:
+ self.connection.last_error = 'asynchronous StartTls failed'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPStartTLSError(self.connection.last_error)
+ del self.connection._awaiting_for_async_start_tls
+ if message_id != 0: # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4)
+ with self.connection.strategy.async_lock:
+ if message_id in self.connection.strategy._responses:
+ self.connection.strategy._responses[message_id].append(dict_response)
+ else:
+ self.connection.strategy._responses[message_id] = [dict_response]
+ if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']:
+ self.connection.strategy._responses[message_id].append(RESPONSE_COMPLETE)
+ self.connection.strategy.set_event_for_message(message_id)
+
+ if self.connection.strategy.can_stream: # for AsyncStreamStrategy, used for PersistentSearch
+ self.connection.strategy.accumulate_stream(message_id, dict_response)
+ unprocessed = unprocessed[length:]
+ get_more_data = False if unprocessed else True
+ listen = True if self.connection.listening or unprocessed else False
+ else: # Unsolicited Notification
+ if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036': # Notice of Disconnection as per RFC4511 (paragraph 4.4.1)
+ listen = False
+ else:
+ self.connection.last_error = 'unknown unsolicited notification from server'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPStartTLSError(self.connection.last_error)
+ self.connection.strategy.close()
+
+ def __init__(self, ldap_connection):
+ BaseStrategy.__init__(self, ldap_connection)
+ self.sync = False
+ self.no_real_dsa = False
+ self.pooled = False
+ self._responses = None
+ self._requests = None
+ self.can_stream = False
+ self.receiver = None
+ self.async_lock = Lock()
+ self.event_lock = Lock()
+ self._events = {}
+
+ def open(self, reset_usage=True, read_server_info=True):
+ """
+        Open the connection and start listening on the socket in a different thread
+ """
+ with self.connection.connection_lock:
+ self._responses = dict()
+ self._requests = dict()
+ BaseStrategy.open(self, reset_usage, read_server_info)
+
+ if read_server_info:
+ try:
+ self.connection.refresh_server_info()
+ except LDAPOperationResult: # catch errors from server if raise_exception = True
+ self.connection.server._dsa_info = None
+ self.connection.server._schema_info = None
+
+ def close(self):
+ """
+ Close connection and stop socket thread
+ """
+ with self.connection.connection_lock:
+ BaseStrategy.close(self)
+
+ def _add_event_for_message(self, message_id):
+ with self.event_lock:
+            # The check is needed here because the receiver thread may have already created the event
+ if message_id not in self._events:
+ self._events[message_id] = Event()
+
+ def set_event_for_message(self, message_id):
+ with self.event_lock:
+            # The receiver thread may receive the response before the sender has created the event for the message_id,
+            # so we have to check whether the event already exists
+ if message_id not in self._events:
+ self._events[message_id] = Event()
+ self._events[message_id].set()
+
+ def _get_event_for_message(self, message_id):
+ with self.event_lock:
+ if message_id not in self._events:
+ raise RuntimeError('Event for message[{}] should have been created before accessing'.format(message_id))
+ return self._events[message_id]
+
+ def post_send_search(self, message_id):
+ """
+ Clears connection.response and returns messageId
+ """
+ self.connection.response = None
+ self.connection.request = None
+ self.connection.result = None
+ self._add_event_for_message(message_id)
+ return message_id
+
+ def post_send_single_response(self, message_id):
+ """
+ Clears connection.response and returns messageId.
+ """
+ self.connection.response = None
+ self.connection.request = None
+ self.connection.result = None
+ self._add_event_for_message(message_id)
+ return message_id
+
+ def _start_listen(self):
+ """
+ Start thread in daemon mode
+ """
+ if not self.connection.listening:
+ self.receiver = AsyncStrategy.ReceiverSocketThread(self.connection)
+ self.connection.listening = True
+ self.receiver.daemon = True
+ self.receiver.start()
+
+ def _get_response(self, message_id, timeout):
+ """
+ Performs the capture of the LDAP response for this strategy.
+ The response is complete only after the corresponding event has been set
+ """
+ event = self._get_event_for_message(message_id)
+ flag = event.wait(timeout)
+ if not flag:
+ # timeout
+ return None
+
+ # At this stage the response is guaranteed to be there
+ self._events.pop(message_id)
+ with self.async_lock:
+ return self._responses.pop(message_id)
+
+ def receiving(self):
+ raise NotImplementedError
+
+ def get_stream(self):
+ raise NotImplementedError
+
+ def set_stream(self, value):
+ raise NotImplementedError
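Illustrative sketch (not part of the patch above): with the rewritten AsyncStrategy every operation returns a message id and is completed through the per-message event shown earlier, so callers collect the outcome with Connection.get_response(). Host, base DN and credentials below are placeholders.

    from ldap3 import Server, Connection, ASYNC, SUBTREE
    from ldap3.core.exceptions import LDAPResponseTimeoutError

    server = Server('ldap.example.com')                        # hypothetical server
    conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret',
                      client_strategy=ASYNC, auto_bind=True)

    # with the ASYNC strategy, search() returns the message id immediately
    msg_id = conn.search('dc=example,dc=com', '(objectClass=person)',
                         search_scope=SUBTREE, attributes=['cn'])
    try:
        # get_response() blocks until the receiver thread sets the event, or times out
        responses, result = conn.get_response(msg_id, timeout=10)
    except LDAPResponseTimeoutError:
        responses, result = [], None                           # no answer within 10 seconds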
diff --git a/ldap3/strategy/base.py b/ldap3/strategy/base.py
index f09c4bc..568459e 100644
--- a/ldap3/strategy/base.py
+++ b/ldap3/strategy/base.py
@@ -1,874 +1,902 @@
-"""
-"""
-
-# Created on 2013.07.15
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2013 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more dectails.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-import socket
-from struct import pack
-from platform import system
-from sys import exc_info
-from time import sleep
-from random import choice
-from datetime import datetime
-
-from .. import SYNC, ANONYMOUS, get_config_parameter, BASE, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES
-from ..core.results import DO_NOT_RAISE_EXCEPTIONS, RESULT_REFERRAL
-from ..core.exceptions import LDAPOperationResult, LDAPSASLBindInProgressError, LDAPSocketOpenError, LDAPSessionTerminatedByServerError,\
- LDAPUnknownResponseError, LDAPUnknownRequestError, LDAPReferralError, communication_exception_factory, \
- LDAPSocketSendError, LDAPExceptionError, LDAPControlError, LDAPResponseTimeoutError, LDAPTransactionError
-from ..utils.uri import parse_uri
-from ..protocol.rfc4511 import LDAPMessage, ProtocolOp, MessageID, SearchResultEntry
-from ..operation.add import add_response_to_dict, add_request_to_dict
-from ..operation.modify import modify_request_to_dict, modify_response_to_dict
-from ..operation.search import search_result_reference_response_to_dict, search_result_done_response_to_dict,\
- search_result_entry_response_to_dict, search_request_to_dict, search_result_entry_response_to_dict_fast,\
- search_result_reference_response_to_dict_fast, attributes_to_dict, attributes_to_dict_fast
-from ..operation.bind import bind_response_to_dict, bind_request_to_dict, sicily_bind_response_to_dict, bind_response_to_dict_fast, \
- sicily_bind_response_to_dict_fast
-from ..operation.compare import compare_response_to_dict, compare_request_to_dict
-from ..operation.extended import extended_request_to_dict, extended_response_to_dict, intermediate_response_to_dict, extended_response_to_dict_fast, intermediate_response_to_dict_fast
-from ..core.server import Server
-from ..operation.modifyDn import modify_dn_request_to_dict, modify_dn_response_to_dict
-from ..operation.delete import delete_response_to_dict, delete_request_to_dict
-from ..protocol.convert import prepare_changes_for_request, build_controls_list
-from ..operation.abandon import abandon_request_to_dict
-from ..core.tls import Tls
-from ..protocol.oid import Oids
-from ..protocol.rfc2696 import RealSearchControlValue
-from ..protocol.microsoft import DirSyncControlResponseValue
-from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED, format_ldap_message
-from ..utils.asn1 import encode, decoder, ldap_result_to_dict_fast, decode_sequence
-from ..utils.conv import to_unicode
-
-SESSION_TERMINATED_BY_SERVER = 'TERMINATED_BY_SERVER'
-TRANSACTION_ERROR = 'TRANSACTION_ERROR'
-RESPONSE_COMPLETE = 'RESPONSE_FROM_SERVER_COMPLETE'
-
-
-# noinspection PyProtectedMember
-class BaseStrategy(object):
- """
- Base class for connection strategy
- """
-
- def __init__(self, ldap_connection):
- self.connection = ldap_connection
- self._outstanding = None
- self._referrals = []
- self.sync = None # indicates a synchronous connection
- self.no_real_dsa = None # indicates a connection to a fake LDAP server
- self.pooled = None # Indicates a connection with a connection pool
- self.can_stream = None # indicates if a strategy keeps a stream of responses (i.e. LdifProducer can accumulate responses with a single header). Stream must be initialized and closed in _start_listen() and _stop_listen()
- self.referral_cache = {}
- if log_enabled(BASIC):
- log(BASIC, 'instantiated <%s>: <%s>', self.__class__.__name__, self)
-
- def __str__(self):
- s = [
- str(self.connection) if self.connection else 'None',
- 'sync' if self.sync else 'async',
- 'no real DSA' if self.no_real_dsa else 'real DSA',
- 'pooled' if self.pooled else 'not pooled',
- 'can stream output' if self.can_stream else 'cannot stream output',
- ]
- return ' - '.join(s)
-
- def open(self, reset_usage=True, read_server_info=True):
- """
- Open a socket to a server. Choose a server from the server pool if available
- """
- if log_enabled(NETWORK):
- log(NETWORK, 'opening connection for <%s>', self.connection)
- if self.connection.lazy and not self.connection._executing_deferred:
- self.connection._deferred_open = True
- self.connection.closed = False
- if log_enabled(NETWORK):
- log(NETWORK, 'deferring open connection for <%s>', self.connection)
- else:
- if not self.connection.closed and not self.connection._executing_deferred: # try to close connection if still open
- self.close()
-
- self._outstanding = dict()
- if self.connection.usage:
- if reset_usage or not self.connection._usage.initial_connection_start_time:
- self.connection._usage.start()
-
- if self.connection.server_pool:
- new_server = self.connection.server_pool.get_server(self.connection) # get a server from the server_pool if available
- if self.connection.server != new_server:
- self.connection.server = new_server
- if self.connection.usage:
- self.connection._usage.servers_from_pool += 1
-
- exception_history = []
- if not self.no_real_dsa: # tries to connect to a real server
- for candidate_address in self.connection.server.candidate_addresses():
- try:
- if log_enabled(BASIC):
- log(BASIC, 'try to open candidate address %s', candidate_address[:-2])
- self._open_socket(candidate_address, self.connection.server.ssl, unix_socket=self.connection.server.ipc)
- self.connection.server.current_address = candidate_address
- self.connection.server.update_availability(candidate_address, True)
- break
- except Exception:
- self.connection.server.update_availability(candidate_address, False)
- exception_history.append((datetime.now(), exc_info()[0], exc_info()[1], candidate_address[4]))
-
- if not self.connection.server.current_address and exception_history:
- if len(exception_history) == 1: # only one exception, reraise
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', exception_history[0][1](exception_history[0][2]), self.connection)
- raise exception_history[0][1](exception_history[0][2])
- else:
- if log_enabled(ERROR):
- log(ERROR, 'unable to open socket for <%s>', self.connection)
- raise LDAPSocketOpenError('unable to open socket', exception_history)
- elif not self.connection.server.current_address:
- if log_enabled(ERROR):
- log(ERROR, 'invalid server address for <%s>', self.connection)
- raise LDAPSocketOpenError('invalid server address')
-
- self.connection._deferred_open = False
- self._start_listen()
- self.connection.do_auto_bind()
- if log_enabled(NETWORK):
- log(NETWORK, 'connection open for <%s>', self.connection)
-
- def close(self):
- """
- Close connection
- """
- if log_enabled(NETWORK):
- log(NETWORK, 'closing connection for <%s>', self.connection)
- if self.connection.lazy and not self.connection._executing_deferred and (self.connection._deferred_bind or self.connection._deferred_open):
- self.connection.listening = False
- self.connection.closed = True
- if log_enabled(NETWORK):
- log(NETWORK, 'deferred connection closed for <%s>', self.connection)
- else:
- if not self.connection.closed:
- self._stop_listen()
- if not self. no_real_dsa:
- self._close_socket()
- if log_enabled(NETWORK):
- log(NETWORK, 'connection closed for <%s>', self.connection)
-
- self.connection.bound = False
- self.connection.request = None
- self.connection.response = None
- self.connection.tls_started = False
- self._outstanding = None
- self._referrals = []
-
- if not self.connection.strategy.no_real_dsa:
- self.connection.server.current_address = None
- if self.connection.usage:
- self.connection._usage.stop()
-
- def _open_socket(self, address, use_ssl=False, unix_socket=False):
- """
- Tries to open and connect a socket to a Server
- raise LDAPExceptionError if unable to open or connect socket
- """
- exc = None
- try:
- self.connection.socket = socket.socket(*address[:3])
- except Exception as e:
- self.connection.last_error = 'socket creation error: ' + str(e)
- exc = e
-
- if exc:
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
-
- raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
-
- try: # set socket timeout for opening connection
- if self.connection.server.connect_timeout:
- self.connection.socket.settimeout(self.connection.server.connect_timeout)
- self.connection.socket.connect(address[4])
- except socket.error as e:
- self.connection.last_error = 'socket connection error while opening: ' + str(e)
- exc = e
-
- if exc:
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
-
- # Set connection recv timeout (must be set after connect,
- # because socket.settimeout() affects both, connect() as
- # well as recv(). Set it before tls.wrap_socket() because
- # the recv timeout should take effect during the TLS
- # handshake.
- if self.connection.receive_timeout is not None:
- try: # set receive timeout for the connection socket
- self.connection.socket.settimeout(self.connection.receive_timeout)
- if system().lower() == 'windows':
- self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, int(1000 * self.connection.receive_timeout))
- else:
- self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, pack('LL', self.connection.receive_timeout, 0))
- except socket.error as e:
- self.connection.last_error = 'unable to set receive timeout for socket connection: ' + str(e)
- exc = e
-
- if exc:
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
-
- if use_ssl:
- try:
- self.connection.server.tls.wrap_socket(self.connection, do_handshake=True)
- if self.connection.usage:
- self.connection._usage.wrapped_sockets += 1
- except Exception as e:
- self.connection.last_error = 'socket ssl wrapping error: ' + str(e)
- exc = e
-
- if exc:
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
-
- if self.connection.usage:
- self.connection._usage.open_sockets += 1
-
- self.connection.closed = False
-
- def _close_socket(self):
- """
- Try to close a socket
- don't raise exception if unable to close socket, assume socket is already closed
- """
-
- try:
- self.connection.socket.shutdown(socket.SHUT_RDWR)
- except Exception:
- pass
-
- try:
- self.connection.socket.close()
- except Exception:
- pass
-
- self.connection.socket = None
- self.connection.closed = True
-
- if self.connection.usage:
- self.connection._usage.closed_sockets += 1
-
- def _stop_listen(self):
- self.connection.listening = False
-
- def send(self, message_type, request, controls=None):
- """
- Send an LDAP message
- Returns the message_id
- """
- self.connection.request = None
- if self.connection.listening:
- if self.connection.sasl_in_progress and message_type not in ['bindRequest']: # as per RFC4511 (4.2.1)
- self.connection.last_error = 'cannot send operation requests while SASL bind is in progress'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPSASLBindInProgressError(self.connection.last_error)
- message_id = self.connection.server.next_message_id()
- ldap_message = LDAPMessage()
- ldap_message['messageID'] = MessageID(message_id)
- ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
- message_controls = build_controls_list(controls)
- if message_controls is not None:
- ldap_message['controls'] = message_controls
- self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
- self._outstanding[message_id] = self.connection.request
- self.sending(ldap_message)
- else:
- self.connection.last_error = 'unable to send message, socket is not open'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPSocketOpenError(self.connection.last_error)
-
- return message_id
-
- def get_response(self, message_id, timeout=None, get_request=False):
- """
- Get response LDAP messages
- Responses are returned by the underlying connection strategy
- Check if message_id LDAP message is still outstanding and wait for timeout to see if it appears in _get_response
- Result is stored in connection.result
- Responses without result is stored in connection.response
- A tuple (responses, result) is returned
- """
- conf_sleep_interval = get_config_parameter('RESPONSE_SLEEPTIME')
- if timeout is None:
- timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT')
- response = None
- result = None
- request = None
- if self._outstanding and message_id in self._outstanding:
- while timeout >= 0: # waiting for completed message to appear in responses
- responses = self._get_response(message_id)
- if not responses:
- sleep(conf_sleep_interval)
- timeout -= conf_sleep_interval
- continue
-
- if responses == SESSION_TERMINATED_BY_SERVER:
- try: # try to close the session but don't raise any error if server has already closed the session
- self.close()
- except (socket.error, LDAPExceptionError):
- pass
- self.connection.last_error = 'session terminated by server'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPSessionTerminatedByServerError(self.connection.last_error)
- elif responses == TRANSACTION_ERROR: # Novell LDAP Transaction unsolicited notification
- self.connection.last_error = 'transaction error'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPTransactionError(self.connection.last_error)
-
- # if referral in response opens a new connection to resolve referrals if requested
-
- if responses[-2]['result'] == RESULT_REFERRAL:
- if self.connection.usage:
- self.connection._usage.referrals_received += 1
- if self.connection.auto_referrals:
- ref_response, ref_result = self.do_operation_on_referral(self._outstanding[message_id], responses[-2]['referrals'])
- if ref_response is not None:
- responses = ref_response + [ref_result]
- responses.append(RESPONSE_COMPLETE)
- elif ref_result is not None:
- responses = [ref_result, RESPONSE_COMPLETE]
-
- self._referrals = []
-
- if responses:
- result = responses[-2]
- response = responses[:-2]
- self.connection.result = None
- self.connection.response = None
- break
-
- if timeout <= 0:
- if log_enabled(ERROR):
- log(ERROR, 'socket timeout, no response from server for <%s>', self.connection)
- raise LDAPResponseTimeoutError('no response from server')
-
- if self.connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'operation result <%s> for <%s>', result, self.connection)
- self._outstanding.pop(message_id)
- raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
-
- # checks if any response has a range tag
- # self._auto_range_searching is set as a flag to avoid recursive searches
- if self.connection.auto_range and not hasattr(self, '_auto_range_searching') and any((True for resp in response if 'raw_attributes' in resp for name in resp['raw_attributes'] if ';range=' in name)):
- self._auto_range_searching = result.copy()
- temp_response = response[:] # copy
- self.do_search_on_auto_range(self._outstanding[message_id], response)
- for resp in temp_response:
- if resp['type'] == 'searchResEntry':
- keys = [key for key in resp['raw_attributes'] if ';range=' in key]
- for key in keys:
- del resp['raw_attributes'][key]
- del resp['attributes'][key]
- response = temp_response
- result = self._auto_range_searching
- del self._auto_range_searching
-
- if self.connection.empty_attributes:
- for entry in response:
- if entry['type'] == 'searchResEntry':
- for attribute_type in self._outstanding[message_id]['attributes']:
- if attribute_type not in entry['raw_attributes'] and attribute_type not in (ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES):
- entry['raw_attributes'][attribute_type] = list()
- entry['attributes'][attribute_type] = list()
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'attribute set to empty list for missing attribute <%s> in <%s>', attribute_type, self)
- if not self.connection.auto_range:
- attrs_to_remove = []
- # removes original empty attribute in case a range tag is returned
- for attribute_type in entry['attributes']:
- if ';range' in attribute_type.lower():
- orig_attr, _, _ = attribute_type.partition(';')
- attrs_to_remove.append(orig_attr)
- for attribute_type in attrs_to_remove:
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'attribute type <%s> removed in response because of same attribute returned as range by the server in <%s>', attribute_type, self)
- del entry['raw_attributes'][attribute_type]
- del entry['attributes'][attribute_type]
-
- request = self._outstanding.pop(message_id)
- else:
- if log_enabled(ERROR):
- log(ERROR, 'message id not in outstanding queue for <%s>', self.connection)
- raise(LDAPResponseTimeoutError('message id not in outstanding queue'))
-
- if get_request:
- return response, result, request
- else:
- return response, result
-
- @staticmethod
- def compute_ldap_message_size(data):
- """
- Compute LDAP Message size according to BER definite length rules
- Returns -1 if too few data to compute message length
- """
- if isinstance(data, str): # fix for Python 2, data is string not bytes
- data = bytearray(data) # Python 2 bytearray is equivalent to Python 3 bytes
-
- ret_value = -1
- if len(data) > 2:
- if data[1] <= 127: # BER definite length - short form. Highest bit of byte 1 is 0, message length is in the last 7 bits - Value can be up to 127 bytes long
- ret_value = data[1] + 2
- else: # BER definite length - long form. Highest bit of byte 1 is 1, last 7 bits counts the number of following octets containing the value length
- bytes_length = data[1] - 128
- if len(data) >= bytes_length + 2:
- value_length = 0
- cont = bytes_length
- for byte in data[2:2 + bytes_length]:
- cont -= 1
- value_length += byte * (256 ** cont)
- ret_value = value_length + 2 + bytes_length
-
- return ret_value
-
- def decode_response(self, ldap_message):
- """
- Convert received LDAPMessage to a dict
- """
- message_type = ldap_message.getComponentByName('protocolOp').getName()
- component = ldap_message['protocolOp'].getComponent()
- controls = ldap_message['controls']
- if message_type == 'bindResponse':
- if not bytes(component['matchedDN']).startswith(b'NTLM'): # patch for microsoft ntlm authentication
- result = bind_response_to_dict(component)
- else:
- result = sicily_bind_response_to_dict(component)
- elif message_type == 'searchResEntry':
- result = search_result_entry_response_to_dict(component, self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
- elif message_type == 'searchResDone':
- result = search_result_done_response_to_dict(component)
- elif message_type == 'searchResRef':
- result = search_result_reference_response_to_dict(component)
- elif message_type == 'modifyResponse':
- result = modify_response_to_dict(component)
- elif message_type == 'addResponse':
- result = add_response_to_dict(component)
- elif message_type == 'delResponse':
- result = delete_response_to_dict(component)
- elif message_type == 'modDNResponse':
- result = modify_dn_response_to_dict(component)
- elif message_type == 'compareResponse':
- result = compare_response_to_dict(component)
- elif message_type == 'extendedResp':
- result = extended_response_to_dict(component)
- elif message_type == 'intermediateResponse':
- result = intermediate_response_to_dict(component)
- else:
- if log_enabled(ERROR):
- log(ERROR, 'unknown response <%s> for <%s>', message_type, self.connection)
- raise LDAPUnknownResponseError('unknown response')
- result['type'] = message_type
- if controls:
- result['controls'] = dict()
- for control in controls:
- decoded_control = self.decode_control(control)
- result['controls'][decoded_control[0]] = decoded_control[1]
- return result
-
- def decode_response_fast(self, ldap_message):
- """
- Convert received LDAPMessage from fast ber decoder to a dict
- """
- if ldap_message['protocolOp'] == 1: # bindResponse
- if not ldap_message['payload'][1][3].startswith(b'NTLM'): # patch for microsoft ntlm authentication
- result = bind_response_to_dict_fast(ldap_message['payload'])
- else:
- result = sicily_bind_response_to_dict_fast(ldap_message['payload'])
- result['type'] = 'bindResponse'
- elif ldap_message['protocolOp'] == 4: # searchResEntry'
- result = search_result_entry_response_to_dict_fast(ldap_message['payload'], self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
- result['type'] = 'searchResEntry'
- elif ldap_message['protocolOp'] == 5: # searchResDone
- result = ldap_result_to_dict_fast(ldap_message['payload'])
- result['type'] = 'searchResDone'
- elif ldap_message['protocolOp'] == 19: # searchResRef
- result = search_result_reference_response_to_dict_fast(ldap_message['payload'])
- result['type'] = 'searchResRef'
- elif ldap_message['protocolOp'] == 7: # modifyResponse
- result = ldap_result_to_dict_fast(ldap_message['payload'])
- result['type'] = 'modifyResponse'
- elif ldap_message['protocolOp'] == 9: # addResponse
- result = ldap_result_to_dict_fast(ldap_message['payload'])
- result['type'] = 'addResponse'
- elif ldap_message['protocolOp'] == 11: # delResponse
- result = ldap_result_to_dict_fast(ldap_message['payload'])
- result['type'] = 'delResponse'
- elif ldap_message['protocolOp'] == 13: # modDNResponse
- result = ldap_result_to_dict_fast(ldap_message['payload'])
- result['type'] = 'modDNResponse'
- elif ldap_message['protocolOp'] == 15: # compareResponse
- result = ldap_result_to_dict_fast(ldap_message['payload'])
- result['type'] = 'compareResponse'
- elif ldap_message['protocolOp'] == 24: # extendedResp
- result = extended_response_to_dict_fast(ldap_message['payload'])
- result['type'] = 'extendedResp'
- elif ldap_message['protocolOp'] == 25: # intermediateResponse
- result = intermediate_response_to_dict_fast(ldap_message['payload'])
- result['type'] = 'intermediateResponse'
- else:
- if log_enabled(ERROR):
- log(ERROR, 'unknown response <%s> for <%s>', ldap_message['protocolOp'], self.connection)
- raise LDAPUnknownResponseError('unknown response')
- if ldap_message['controls']:
- result['controls'] = dict()
- for control in ldap_message['controls']:
- decoded_control = self.decode_control_fast(control[3])
- result['controls'][decoded_control[0]] = decoded_control[1]
- return result
-
- @staticmethod
- def decode_control(control):
- """
- decode control, return a 2-element tuple where the first element is the control oid
- and the second element is a dictionary with description (from Oids), criticality and decoded control value
- """
- control_type = str(control['controlType'])
- criticality = bool(control['criticality'])
- control_value = bytes(control['controlValue'])
- unprocessed = None
- if control_type == '1.2.840.113556.1.4.319': # simple paged search as per RFC2696
- control_resp, unprocessed = decoder.decode(control_value, asn1Spec=RealSearchControlValue())
- control_value = dict()
- control_value['size'] = int(control_resp['size'])
- control_value['cookie'] = bytes(control_resp['cookie'])
- elif control_type == '1.2.840.113556.1.4.841': # DirSync AD
- control_resp, unprocessed = decoder.decode(control_value, asn1Spec=DirSyncControlResponseValue())
- control_value = dict()
- control_value['more_results'] = bool(control_resp['MoreResults']) # more_result if nonzero
- control_value['cookie'] = bytes(control_resp['CookieServer'])
- elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2': # Pre-Read control, Post-Read Control as per RFC 4527
- control_resp, unprocessed = decoder.decode(control_value, asn1Spec=SearchResultEntry())
- control_value = dict()
- control_value['result'] = attributes_to_dict(control_resp['attributes'])
- if unprocessed:
- if log_enabled(ERROR):
- log(ERROR, 'unprocessed control response in substrate')
- raise LDAPControlError('unprocessed control response in substrate')
- return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value}
-
- @staticmethod
- def decode_control_fast(control):
- """
- decode control, return a 2-element tuple where the first element is the control oid
- and the second element is a dictionary with description (from Oids), criticality and decoded control value
- """
- control_type = str(to_unicode(control[0][3], from_server=True))
- criticality = False
- control_value = None
- for r in control[1:]:
- if r[2] == 4: # controlValue
- control_value = r[3]
- else:
- criticality = False if r[3] == 0 else True # criticality (booleand default to False)
- if control_type == '1.2.840.113556.1.4.319': # simple paged search as per RFC2696
- control_resp = decode_sequence(control_value, 0, len(control_value))
- control_value = dict()
- control_value['size'] = int(control_resp[0][3][0][3])
- control_value['cookie'] = bytes(control_resp[0][3][1][3])
- elif control_type == '1.2.840.113556.1.4.841': # DirSync AD
- control_resp = decode_sequence(control_value, 0, len(control_value))
- control_value = dict()
- control_value['more_results'] = True if control_resp[0][3][0][3] else False # more_result if nonzero
- control_value['cookie'] = control_resp[0][3][2][3]
- elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2': # Pre-Read control, Post-Read Control as per RFC 4527
- control_resp = decode_sequence(control_value, 0, len(control_value))
- control_value = dict()
- control_value['result'] = attributes_to_dict_fast(control_resp[0][3][1][3])
- return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value}
-
- @staticmethod
- def decode_request(message_type, component, controls=None):
- # message_type = ldap_message.getComponentByName('protocolOp').getName()
- # component = ldap_message['protocolOp'].getComponent()
- if message_type == 'bindRequest':
- result = bind_request_to_dict(component)
- elif message_type == 'unbindRequest':
- result = dict()
- elif message_type == 'addRequest':
- result = add_request_to_dict(component)
- elif message_type == 'compareRequest':
- result = compare_request_to_dict(component)
- elif message_type == 'delRequest':
- result = delete_request_to_dict(component)
- elif message_type == 'extendedReq':
- result = extended_request_to_dict(component)
- elif message_type == 'modifyRequest':
- result = modify_request_to_dict(component)
- elif message_type == 'modDNRequest':
- result = modify_dn_request_to_dict(component)
- elif message_type == 'searchRequest':
- result = search_request_to_dict(component)
- elif message_type == 'abandonRequest':
- result = abandon_request_to_dict(component)
- else:
- if log_enabled(ERROR):
- log(ERROR, 'unknown request <%s>', message_type)
- raise LDAPUnknownRequestError('unknown request')
- result['type'] = message_type
- result['controls'] = controls
-
- return result
-
- def valid_referral_list(self, referrals):
- referral_list = []
- for referral in referrals:
- candidate_referral = parse_uri(referral)
- if candidate_referral:
- for ref_host in self.connection.server.allowed_referral_hosts:
- if ref_host[0] == candidate_referral['host'] or ref_host[0] == '*':
- if candidate_referral['host'] not in self._referrals:
- candidate_referral['anonymousBindOnly'] = not ref_host[1]
- referral_list.append(candidate_referral)
- break
-
- return referral_list
-
- def do_next_range_search(self, request, response, attr_name):
- done = False
- current_response = response
- while not done:
- attr_type, _, returned_range = attr_name.partition(';range=')
- _, _, high_range = returned_range.partition('-')
- response['raw_attributes'][attr_type] += current_response['raw_attributes'][attr_name]
- response['attributes'][attr_type] += current_response['attributes'][attr_name]
- if high_range != '*':
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'performing next search on auto-range <%s> via <%s>', str(int(high_range) + 1), self.connection)
- requested_range = attr_type + ';range=' + str(int(high_range) + 1) + '-*'
- result = self.connection.search(search_base=response['dn'],
- search_filter='(objectclass=*)',
- search_scope=BASE,
- dereference_aliases=request['dereferenceAlias'],
- attributes=[attr_type + ';range=' + str(int(high_range) + 1) + '-*'])
- if isinstance(result, bool):
- if result:
- current_response = self.connection.response[0]
- else:
- done = True
- else:
- current_response, _ = self.get_response(result)
- current_response = current_response[0]
-
- if not done:
- if requested_range in current_response['raw_attributes'] and len(current_response['raw_attributes'][requested_range]) == 0:
- del current_response['raw_attributes'][requested_range]
- del current_response['attributes'][requested_range]
- attr_name = list(filter(lambda a: ';range=' in a, current_response['raw_attributes'].keys()))[0]
- continue
-
- done = True
-
- def do_search_on_auto_range(self, request, response):
- for resp in [r for r in response if r['type'] == 'searchResEntry']:
- for attr_name in list(resp['raw_attributes'].keys()): # generate list to avoid changing of dict size error
- if ';range=' in attr_name:
- attr_type, _, _ = attr_name.partition(';range=')
- if attr_type not in resp['raw_attributes'] or resp['raw_attributes'][attr_type] is None:
- resp['raw_attributes'][attr_type] = list()
- if attr_type not in resp['attributes'] or resp['attributes'][attr_type] is None:
- resp['attributes'][attr_type] = list()
- self.do_next_range_search(request, resp, attr_name)
-
- def do_operation_on_referral(self, request, referrals):
- if log_enabled(PROTOCOL):
- log(PROTOCOL, 'following referral for <%s>', self.connection)
- valid_referral_list = self.valid_referral_list(referrals)
- if valid_referral_list:
- preferred_referral_list = [referral for referral in valid_referral_list if referral['ssl'] == self.connection.server.ssl]
- selected_referral = choice(preferred_referral_list) if preferred_referral_list else choice(valid_referral_list)
-
- cachekey = (selected_referral['host'], selected_referral['port'] or self.connection.server.port, selected_referral['ssl'])
- if self.connection.use_referral_cache and cachekey in self.referral_cache:
- referral_connection = self.referral_cache[cachekey]
- else:
- referral_server = Server(host=selected_referral['host'],
- port=selected_referral['port'] or self.connection.server.port,
- use_ssl=selected_referral['ssl'],
- get_info=self.connection.server.get_info,
- formatter=self.connection.server.custom_formatter,
- connect_timeout=self.connection.server.connect_timeout,
- mode=self.connection.server.mode,
- allowed_referral_hosts=self.connection.server.allowed_referral_hosts,
- tls=Tls(local_private_key_file=self.connection.server.tls.private_key_file,
- local_certificate_file=self.connection.server.tls.certificate_file,
- validate=self.connection.server.tls.validate,
- version=self.connection.server.tls.version,
- ca_certs_file=self.connection.server.tls.ca_certs_file) if selected_referral['ssl'] else None)
-
- from ..core.connection import Connection
-
- referral_connection = Connection(server=referral_server,
- user=self.connection.user if not selected_referral['anonymousBindOnly'] else None,
- password=self.connection.password if not selected_referral['anonymousBindOnly'] else None,
- version=self.connection.version,
- authentication=self.connection.authentication if not selected_referral['anonymousBindOnly'] else ANONYMOUS,
- client_strategy=SYNC,
- auto_referrals=True,
- read_only=self.connection.read_only,
- check_names=self.connection.check_names,
- raise_exceptions=self.connection.raise_exceptions,
- fast_decoder=self.connection.fast_decoder,
- receive_timeout=self.connection.receive_timeout,
- sasl_mechanism=self.connection.sasl_mechanism,
- sasl_credentials=self.connection.sasl_credentials)
-
- if self.connection.usage:
- self.connection._usage.referrals_connections += 1
-
- referral_connection.open()
- referral_connection.strategy._referrals = self._referrals
- if self.connection.tls_started and not referral_server.ssl: # if the original server was in start_tls mode and the referral server is not in ssl then start_tls on the referral connection
- referral_connection.start_tls()
-
- if self.connection.bound:
- referral_connection.bind()
-
- if self.connection.usage:
- self.connection._usage.referrals_followed += 1
-
- if request['type'] == 'searchRequest':
- referral_connection.search(selected_referral['base'] or request['base'],
- selected_referral['filter'] or request['filter'],
- selected_referral['scope'] or request['scope'],
- request['dereferenceAlias'],
- selected_referral['attributes'] or request['attributes'],
- request['sizeLimit'],
- request['timeLimit'],
- request['typesOnly'],
- controls=request['controls'])
- elif request['type'] == 'addRequest':
- referral_connection.add(selected_referral['base'] or request['entry'],
- None,
- request['attributes'],
- controls=request['controls'])
- elif request['type'] == 'compareRequest':
- referral_connection.compare(selected_referral['base'] or request['entry'],
- request['attribute'],
- request['value'],
- controls=request['controls'])
- elif request['type'] == 'delRequest':
- referral_connection.delete(selected_referral['base'] or request['entry'],
- controls=request['controls'])
- elif request['type'] == 'extendedReq':
- referral_connection.extended(request['name'],
- request['value'],
- controls=request['controls'],
- no_encode=True
- )
- elif request['type'] == 'modifyRequest':
- referral_connection.modify(selected_referral['base'] or request['entry'],
- prepare_changes_for_request(request['changes']),
- controls=request['controls'])
- elif request['type'] == 'modDNRequest':
- referral_connection.modify_dn(selected_referral['base'] or request['entry'],
- request['newRdn'],
- request['deleteOldRdn'],
- request['newSuperior'],
- controls=request['controls'])
- else:
- self.connection.last_error = 'referral operation not permitted'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPReferralError(self.connection.last_error)
-
- response = referral_connection.response
- result = referral_connection.result
- if self.connection.use_referral_cache:
- self.referral_cache[cachekey] = referral_connection
- else:
- referral_connection.unbind()
- else:
- response = None
- result = None
-
- return response, result
-
- def sending(self, ldap_message):
- exc = None
- if log_enabled(NETWORK):
- log(NETWORK, 'sending 1 ldap message for <%s>', self.connection)
- try:
- encoded_message = encode(ldap_message)
- self.connection.socket.sendall(encoded_message)
- if log_enabled(EXTENDED):
- log(EXTENDED, 'ldap message sent via <%s>:%s', self.connection, format_ldap_message(ldap_message, '>>'))
- if log_enabled(NETWORK):
- log(NETWORK, 'sent %d bytes via <%s>', len(encoded_message), self.connection)
- except socket.error as e:
- self.connection.last_error = 'socket sending error' + str(e)
- exc = e
- encoded_message = None
-
- if exc:
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise communication_exception_factory(LDAPSocketSendError, exc)(self.connection.last_error)
-
- if self.connection.usage:
- self.connection._usage.update_transmitted_message(self.connection.request, len(encoded_message))
-
- def _start_listen(self):
- # overridden on strategy class
- raise NotImplementedError
-
- def _get_response(self, message_id):
- # overridden in strategy class
- raise NotImplementedError
-
- def receiving(self):
- # overridden in strategy class
- raise NotImplementedError
-
- def post_send_single_response(self, message_id):
- # overridden in strategy class
- raise NotImplementedError
-
- def post_send_search(self, message_id):
- # overridden in strategy class
- raise NotImplementedError
-
- def get_stream(self):
- raise NotImplementedError
-
- def set_stream(self, value):
- raise NotImplementedError
-
- def unbind_referral_cache(self):
- while len(self.referral_cache) > 0:
- cachekey, referral_connection = self.referral_cache.popitem()
- referral_connection.unbind()
+"""
+"""
+
+# Created on 2013.07.15
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2013 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+import socket
+from struct import pack
+from platform import system
+from time import sleep
+from random import choice
+
+from .. import SYNC, ANONYMOUS, get_config_parameter, BASE, ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES
+from ..core.results import DO_NOT_RAISE_EXCEPTIONS, RESULT_REFERRAL
+from ..core.exceptions import LDAPOperationResult, LDAPSASLBindInProgressError, LDAPSocketOpenError, LDAPSessionTerminatedByServerError,\
+ LDAPUnknownResponseError, LDAPUnknownRequestError, LDAPReferralError, communication_exception_factory, \
+ LDAPSocketSendError, LDAPExceptionError, LDAPControlError, LDAPResponseTimeoutError, LDAPTransactionError
+from ..utils.uri import parse_uri
+from ..protocol.rfc4511 import LDAPMessage, ProtocolOp, MessageID, SearchResultEntry
+from ..operation.add import add_response_to_dict, add_request_to_dict
+from ..operation.modify import modify_request_to_dict, modify_response_to_dict
+from ..operation.search import search_result_reference_response_to_dict, search_result_done_response_to_dict,\
+ search_result_entry_response_to_dict, search_request_to_dict, search_result_entry_response_to_dict_fast,\
+ search_result_reference_response_to_dict_fast, attributes_to_dict, attributes_to_dict_fast
+from ..operation.bind import bind_response_to_dict, bind_request_to_dict, sicily_bind_response_to_dict, bind_response_to_dict_fast, \
+ sicily_bind_response_to_dict_fast
+from ..operation.compare import compare_response_to_dict, compare_request_to_dict
+from ..operation.extended import extended_request_to_dict, extended_response_to_dict, intermediate_response_to_dict, extended_response_to_dict_fast, intermediate_response_to_dict_fast
+from ..core.server import Server
+from ..operation.modifyDn import modify_dn_request_to_dict, modify_dn_response_to_dict
+from ..operation.delete import delete_response_to_dict, delete_request_to_dict
+from ..protocol.convert import prepare_changes_for_request, build_controls_list
+from ..operation.abandon import abandon_request_to_dict
+from ..core.tls import Tls
+from ..protocol.oid import Oids
+from ..protocol.rfc2696 import RealSearchControlValue
+from ..protocol.microsoft import DirSyncControlResponseValue
+from ..utils.log import log, log_enabled, ERROR, BASIC, PROTOCOL, NETWORK, EXTENDED, format_ldap_message
+from ..utils.asn1 import encode, decoder, ldap_result_to_dict_fast, decode_sequence
+from ..utils.conv import to_unicode
+
+SESSION_TERMINATED_BY_SERVER = 'TERMINATED_BY_SERVER'
+TRANSACTION_ERROR = 'TRANSACTION_ERROR'
+RESPONSE_COMPLETE = 'RESPONSE_FROM_SERVER_COMPLETE'
+
+
+# noinspection PyProtectedMember
+class BaseStrategy(object):
+ """
+ Base class for connection strategy
+ """
+
+ def __init__(self, ldap_connection):
+ self.connection = ldap_connection
+ self._outstanding = None
+ self._referrals = []
+ self.sync = None # indicates a synchronous connection
+ self.no_real_dsa = None # indicates a connection to a fake LDAP server
+ self.pooled = None # Indicates a connection with a connection pool
+ self.can_stream = None # indicates if a strategy keeps a stream of responses (i.e. LdifProducer can accumulate responses with a single header). Stream must be initialized and closed in _start_listen() and _stop_listen()
+ self.referral_cache = {}
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated <%s>: <%s>', self.__class__.__name__, self)
+
+ def __str__(self):
+ s = [
+ str(self.connection) if self.connection else 'None',
+ 'sync' if self.sync else 'async',
+ 'no real DSA' if self.no_real_dsa else 'real DSA',
+ 'pooled' if self.pooled else 'not pooled',
+ 'can stream output' if self.can_stream else 'cannot stream output',
+ ]
+ return ' - '.join(s)
+
+ def open(self, reset_usage=True, read_server_info=True):
+ """
+ Open a socket to a server. Choose a server from the server pool if available
+ """
+ if log_enabled(NETWORK):
+ log(NETWORK, 'opening connection for <%s>', self.connection)
+ if self.connection.lazy and not self.connection._executing_deferred:
+ self.connection._deferred_open = True
+ self.connection.closed = False
+ if log_enabled(NETWORK):
+ log(NETWORK, 'deferring open connection for <%s>', self.connection)
+ else:
+ if not self.connection.closed and not self.connection._executing_deferred: # try to close connection if still open
+ self.close()
+
+ self._outstanding = dict()
+ if self.connection.usage:
+ if reset_usage or not self.connection._usage.initial_connection_start_time:
+ self.connection._usage.start()
+
+ if self.connection.server_pool:
+ new_server = self.connection.server_pool.get_server(self.connection) # get a server from the server_pool if available
+ if self.connection.server != new_server:
+ self.connection.server = new_server
+ if self.connection.usage:
+ self.connection._usage.servers_from_pool += 1
+
+ exception_history = []
+ if not self.no_real_dsa: # tries to connect to a real server
+ for candidate_address in self.connection.server.candidate_addresses():
+ try:
+ if log_enabled(BASIC):
+ log(BASIC, 'try to open candidate address %s', candidate_address[:-2])
+ self._open_socket(candidate_address, self.connection.server.ssl, unix_socket=self.connection.server.ipc)
+ self.connection.server.current_address = candidate_address
+ self.connection.server.update_availability(candidate_address, True)
+ break
+ except Exception as e:
+ self.connection.server.update_availability(candidate_address, False)
+ # exception_history.append((datetime.now(), exc_type, exc_value, candidate_address[4]))
+ exception_history.append((type(e)(str(e)), candidate_address[4]))
+ if not self.connection.server.current_address and exception_history:
+ if len(exception_history) == 1: # only one exception, reraise
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', str(exception_history[0][0]) + ' ' + str((exception_history[0][1])), self.connection)
+ raise exception_history[0][0]
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'unable to open socket for <%s>', self.connection)
+ raise LDAPSocketOpenError('unable to open socket', exception_history)
+ elif not self.connection.server.current_address:
+ if log_enabled(ERROR):
+ log(ERROR, 'invalid server address for <%s>', self.connection)
+ raise LDAPSocketOpenError('invalid server address')
+
+ self.connection._deferred_open = False
+ self._start_listen()
+ # self.connection.do_auto_bind()
+ if log_enabled(NETWORK):
+ log(NETWORK, 'connection open for <%s>', self.connection)
+
+ def close(self):
+ """
+ Close connection
+ """
+ if log_enabled(NETWORK):
+ log(NETWORK, 'closing connection for <%s>', self.connection)
+ if self.connection.lazy and not self.connection._executing_deferred and (self.connection._deferred_bind or self.connection._deferred_open):
+ self.connection.listening = False
+ self.connection.closed = True
+ if log_enabled(NETWORK):
+ log(NETWORK, 'deferred connection closed for <%s>', self.connection)
+ else:
+ if not self.connection.closed:
+ self._stop_listen()
+ if not self.no_real_dsa:
+ self._close_socket()
+ if log_enabled(NETWORK):
+ log(NETWORK, 'connection closed for <%s>', self.connection)
+
+ self.connection.bound = False
+ self.connection.request = None
+ self.connection.response = None
+ self.connection.tls_started = False
+ self._outstanding = None
+ self._referrals = []
+
+ if not self.connection.strategy.no_real_dsa:
+ self.connection.server.current_address = None
+ if self.connection.usage:
+ self.connection._usage.stop()
+
+ def _open_socket(self, address, use_ssl=False, unix_socket=False):
+ """
+ Tries to open and connect a socket to a Server
+ raises LDAPExceptionError if unable to open or connect the socket
+ """
+ try:
+ self.connection.socket = socket.socket(*address[:3])
+ except Exception as e:
+ self.connection.last_error = 'socket creation error: ' + str(e)
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
+ raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
+
+ # Try to bind the socket locally before connecting to the remote address
+ # We go through our connection's source ports and try to bind our socket to our connection's source address
+ # with them.
+ # If no source address or ports were specified, this will have the same success/fail result as if we
+ # tried to connect to the remote server without binding locally first.
+ # This is actually a little bit better, as it lets us distinguish the case of "issue binding the socket
+ # locally" from "remote server is unavailable" with more clarity, though this will only really be an
+ # issue when no source address/port is specified if the system checking server availability is running
+ # as a very unprivileged user.
+ last_bind_exc = None
+ socket_bind_succeeded = False
+ for source_port in self.connection.source_port_list:
+ try:
+ self.connection.socket.bind((self.connection.source_address, source_port))
+ socket_bind_succeeded = True
+ break
+ except Exception as bind_ex:
+ last_bind_exc = bind_ex
+ # we'll always end up logging at error level if we cannot bind any ports to the address locally.
+ # but if some work and some don't you probably don't want the ones that don't at ERROR level
+ if log_enabled(NETWORK):
+ log(NETWORK, 'Unable to bind to local address <%s> with source port <%s> due to <%s>',
+ self.connection.source_address, source_port, bind_ex)
+ if not socket_bind_succeeded:
+ self.connection.last_error = 'socket connection error while locally binding: ' + str(last_bind_exc)
+ if log_enabled(ERROR):
+ log(ERROR, 'Unable to bind to local address <%s> with any of the source ports <%s> for connection <%s> due to <%s>',
+ self.connection.source_address, self.connection.source_port_list, self.connection, last_bind_exc)
+ raise communication_exception_factory(LDAPSocketOpenError, type(last_bind_exc)(str(last_bind_exc)))(last_bind_exc)
+
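# Illustrative sketch only (not part of the patch): the bind loop above takes its
# local address and candidate ports from the Connection, so a caller can pin the
# client side of the socket before connecting. The keyword names below are assumed
# to match the connection attributes used in this patch; host and ports are placeholders.
from ldap3 import Server, Connection

server = Server('ldap.example.com')                        # hypothetical server
conn = Connection(server,
                  source_address='192.0.2.10',             # local interface to bind
                  source_port_list=[5000, 5001, 5002])     # local ports tried in order
conn.open()   # _open_socket() binds to one of the source ports, then connects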
+ try: # set socket timeout for opening connection
+ if self.connection.server.connect_timeout:
+ self.connection.socket.settimeout(self.connection.server.connect_timeout)
+ self.connection.socket.connect(address[4])
+ except socket.error as e:
+ self.connection.last_error = 'socket connection error while opening: ' + str(e)
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
+ raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
+
+ # Set connection recv timeout (must be set after connect,
+ # because socket.settimeout() affects both, connect() as
+ # well as recv(). Set it before tls.wrap_socket() because
+ # the recv timeout should take effect during the TLS
+ # handshake.
+ if self.connection.receive_timeout is not None:
+ try: # set receive timeout for the connection socket
+ self.connection.socket.settimeout(self.connection.receive_timeout)
+ if system().lower() == 'windows':
+ self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, int(1000 * self.connection.receive_timeout))
+ else:
+ self.connection.socket.setsockopt(socket.SOL_SOCKET, socket.SO_RCVTIMEO, pack('LL', self.connection.receive_timeout, 0))
+ except socket.error as e:
+ self.connection.last_error = 'unable to set receive timeout for socket connection: ' + str(e)
+
+ # if exc:
+ # if log_enabled(ERROR):
+ # log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
+
+ if use_ssl:
+ try:
+ self.connection.server.tls.wrap_socket(self.connection, do_handshake=True)
+ if self.connection.usage:
+ self.connection._usage.wrapped_sockets += 1
+ except Exception as e:
+ self.connection.last_error = 'socket ssl wrapping error: ' + str(e)
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ # raise communication_exception_factory(LDAPSocketOpenError, exc)(self.connection.last_error)
+ raise communication_exception_factory(LDAPSocketOpenError, type(e)(str(e)))(self.connection.last_error)
+ if self.connection.usage:
+ self.connection._usage.open_sockets += 1
+
+ self.connection.closed = False
+
+ def _close_socket(self):
+ """
+ Try to close a socket
+ don't raise an exception if the socket cannot be closed; assume it is already closed
+ """
+
+ try:
+ self.connection.socket.shutdown(socket.SHUT_RDWR)
+ except Exception:
+ pass
+
+ try:
+ self.connection.socket.close()
+ except Exception:
+ pass
+
+ self.connection.socket = None
+ self.connection.closed = True
+
+ if self.connection.usage:
+ self.connection._usage.closed_sockets += 1
+
+ def _stop_listen(self):
+ self.connection.listening = False
+
+ def send(self, message_type, request, controls=None):
+ """
+ Send an LDAP message
+ Returns the message_id
+ """
+ self.connection.request = None
+ if self.connection.listening:
+ if self.connection.sasl_in_progress and message_type not in ['bindRequest']: # as per RFC4511 (4.2.1)
+ self.connection.last_error = 'cannot send operation requests while SASL bind is in progress'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPSASLBindInProgressError(self.connection.last_error)
+ message_id = self.connection.server.next_message_id()
+ ldap_message = LDAPMessage()
+ ldap_message['messageID'] = MessageID(message_id)
+ ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
+ message_controls = build_controls_list(controls)
+ if message_controls is not None:
+ ldap_message['controls'] = message_controls
+ self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
+ self._outstanding[message_id] = self.connection.request
+ self.sending(ldap_message)
+ else:
+ self.connection.last_error = 'unable to send message, socket is not open'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPSocketOpenError(self.connection.last_error)
+
+ return message_id
+
+ def get_response(self, message_id, timeout=None, get_request=False):
+ """
+ Get response LDAP messages
+ Responses are returned by the underlying connection strategy
+ Checks if the message_id LDAP message is still outstanding and waits up to timeout for it to appear in _get_response
+ The result is stored in connection.result
+ Responses, without the result, are stored in connection.response
+ A tuple (responses, result) is returned
+ """
+ if timeout is None:
+ timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT')
+ response = None
+ result = None
+ request = None
+ if self._outstanding and message_id in self._outstanding:
+ responses = self._get_response(message_id, timeout)
+
+ if not responses:
+ if log_enabled(ERROR):
+ log(ERROR, 'socket timeout, no response from server for <%s>', self.connection)
+ raise LDAPResponseTimeoutError('no response from server')
+
+ if responses == SESSION_TERMINATED_BY_SERVER:
+ try: # try to close the session but don't raise any error if server has already closed the session
+ self.close()
+ except (socket.error, LDAPExceptionError):
+ pass
+ self.connection.last_error = 'session terminated by server'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPSessionTerminatedByServerError(self.connection.last_error)
+ elif responses == TRANSACTION_ERROR: # Novell LDAP Transaction unsolicited notification
+ self.connection.last_error = 'transaction error'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPTransactionError(self.connection.last_error)
+
+ # if referral in response opens a new connection to resolve referrals if requested
+
+ if responses[-2]['result'] == RESULT_REFERRAL:
+ if self.connection.usage:
+ self.connection._usage.referrals_received += 1
+ if self.connection.auto_referrals:
+ ref_response, ref_result = self.do_operation_on_referral(self._outstanding[message_id], responses[-2]['referrals'])
+ if ref_response is not None:
+ responses = ref_response + [ref_result]
+ responses.append(RESPONSE_COMPLETE)
+ elif ref_result is not None:
+ responses = [ref_result, RESPONSE_COMPLETE]
+
+ self._referrals = []
+
+ if responses:
+ result = responses[-2]
+ response = responses[:-2]
+ self.connection.result = None
+ self.connection.response = None
+
+ if self.connection.raise_exceptions and result and result['result'] not in DO_NOT_RAISE_EXCEPTIONS:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'operation result <%s> for <%s>', result, self.connection)
+ self._outstanding.pop(message_id)
+ self.connection.result = result.copy()
+ raise LDAPOperationResult(result=result['result'], description=result['description'], dn=result['dn'], message=result['message'], response_type=result['type'])
+
+ # checks if any response has a range tag
+ # self._auto_range_searching is set as a flag to avoid recursive searches
+ if self.connection.auto_range and not hasattr(self, '_auto_range_searching') and any((True for resp in response if 'raw_attributes' in resp for name in resp['raw_attributes'] if ';range=' in name)):
+ self._auto_range_searching = result.copy()
+ temp_response = response[:] # copy
+ if self.do_search_on_auto_range(self._outstanding[message_id], response):
+ for resp in temp_response:
+ if resp['type'] == 'searchResEntry':
+ keys = [key for key in resp['raw_attributes'] if ';range=' in key]
+ for key in keys:
+ del resp['raw_attributes'][key]
+ del resp['attributes'][key]
+ response = temp_response
+ result = self._auto_range_searching
+ del self._auto_range_searching
+
+ if self.connection.empty_attributes:
+ for entry in response:
+ if entry['type'] == 'searchResEntry':
+ for attribute_type in self._outstanding[message_id]['attributes']:
+ if attribute_type not in entry['raw_attributes'] and attribute_type not in (ALL_ATTRIBUTES, ALL_OPERATIONAL_ATTRIBUTES, NO_ATTRIBUTES):
+ entry['raw_attributes'][attribute_type] = list()
+ entry['attributes'][attribute_type] = list()
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'attribute set to empty list for missing attribute <%s> in <%s>', attribute_type, self)
+ if not self.connection.auto_range:
+ attrs_to_remove = []
+ # removes original empty attribute in case a range tag is returned
+ for attribute_type in entry['attributes']:
+ if ';range' in attribute_type.lower():
+ orig_attr, _, _ = attribute_type.partition(';')
+ attrs_to_remove.append(orig_attr)
+ for attribute_type in attrs_to_remove:
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'attribute type <%s> removed in response because of same attribute returned as range by the server in <%s>', attribute_type, self)
+ del entry['raw_attributes'][attribute_type]
+ del entry['attributes'][attribute_type]
+
+ request = self._outstanding.pop(message_id)
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'message id not in outstanding queue for <%s>', self.connection)
+ raise LDAPResponseTimeoutError('message id not in outstanding queue')
+
+ if get_request:
+ return response, result, request
+ else:
+ return response, result
+
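For orientation, get_response() is the counterpart of send(): with the asynchronous strategies send() returns a message_id immediately and get_response() waits for the matching (responses, result) tuple. A minimal sketch of that calling pattern, with a hypothetical host and credentials:

    from ldap3 import Server, Connection, ASYNC, SUBTREE

    conn = Connection(Server('ldap.example.com'), user='cn=admin,dc=example,dc=com',
                      password='secret', client_strategy=ASYNC, auto_bind=True)
    msg_id = conn.search('dc=example,dc=com', '(objectClass=person)', SUBTREE,
                         attributes=['cn'])        # asynchronous strategies return the message_id
    responses, result = conn.get_response(msg_id)  # blocks until the answer arrives or the timeout expires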
+ @staticmethod
+ def compute_ldap_message_size(data):
+ """
+ Compute LDAP Message size according to BER definite length rules
+ Returns -1 if there is not enough data to compute the message length
+ """
+ if isinstance(data, str): # fix for Python 2, data is string not bytes
+ data = bytearray(data) # Python 2 bytearray is equivalent to Python 3 bytes
+
+ ret_value = -1
+ if len(data) > 2:
+ if data[1] <= 127: # BER definite length - short form. Highest bit of byte 1 is 0, message length is in the last 7 bits - Value can be up to 127 bytes long
+ ret_value = data[1] + 2
+ else: # BER definite length - long form. Highest bit of byte 1 is 1, the last 7 bits count the number of following octets containing the value length
+ bytes_length = data[1] - 128
+ if len(data) >= bytes_length + 2:
+ value_length = 0
+ cont = bytes_length
+ for byte in data[2:2 + bytes_length]:
+ cont -= 1
+ value_length += byte * (256 ** cont)
+ ret_value = value_length + 2 + bytes_length
+
+ return ret_value
+
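The two BER definite-length forms handled above can be checked by hand; a short sketch with made-up octets (the method is a staticmethod of BaseStrategy):

    from ldap3.strategy.base import BaseStrategy

    # short form: second byte 0x05 means a 5-byte value, so the whole message is 5 + 2 = 7 octets
    assert BaseStrategy.compute_ldap_message_size(b'\x30\x05' + b'\x00' * 5) == 7

    # long form: 0x82 means the length is in the next 2 octets; 0x01 0x90 = 400 value octets,
    # so the whole message is 400 + 2 + 2 = 404 octets
    assert BaseStrategy.compute_ldap_message_size(b'\x30\x82\x01\x90' + b'\x00' * 400) == 404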
+ def decode_response(self, ldap_message):
+ """
+ Convert received LDAPMessage to a dict
+ """
+ message_type = ldap_message.getComponentByName('protocolOp').getName()
+ component = ldap_message['protocolOp'].getComponent()
+ controls = ldap_message['controls'] if ldap_message['controls'].hasValue() else None
+ if message_type == 'bindResponse':
+ if not bytes(component['matchedDN']).startswith(b'NTLM'): # patch for microsoft ntlm authentication
+ result = bind_response_to_dict(component)
+ else:
+ result = sicily_bind_response_to_dict(component)
+ elif message_type == 'searchResEntry':
+ result = search_result_entry_response_to_dict(component, self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
+ elif message_type == 'searchResDone':
+ result = search_result_done_response_to_dict(component)
+ elif message_type == 'searchResRef':
+ result = search_result_reference_response_to_dict(component)
+ elif message_type == 'modifyResponse':
+ result = modify_response_to_dict(component)
+ elif message_type == 'addResponse':
+ result = add_response_to_dict(component)
+ elif message_type == 'delResponse':
+ result = delete_response_to_dict(component)
+ elif message_type == 'modDNResponse':
+ result = modify_dn_response_to_dict(component)
+ elif message_type == 'compareResponse':
+ result = compare_response_to_dict(component)
+ elif message_type == 'extendedResp':
+ result = extended_response_to_dict(component)
+ elif message_type == 'intermediateResponse':
+ result = intermediate_response_to_dict(component)
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'unknown response <%s> for <%s>', message_type, self.connection)
+ raise LDAPUnknownResponseError('unknown response')
+ result['type'] = message_type
+ if controls:
+ result['controls'] = dict()
+ for control in controls:
+ decoded_control = self.decode_control(control)
+ result['controls'][decoded_control[0]] = decoded_control[1]
+ return result
+
+ def decode_response_fast(self, ldap_message):
+ """
+ Convert received LDAPMessage from fast ber decoder to a dict
+ """
+ if ldap_message['protocolOp'] == 1: # bindResponse
+ if not ldap_message['payload'][1][3].startswith(b'NTLM'): # patch for microsoft ntlm authentication
+ result = bind_response_to_dict_fast(ldap_message['payload'])
+ else:
+ result = sicily_bind_response_to_dict_fast(ldap_message['payload'])
+ result['type'] = 'bindResponse'
+ elif ldap_message['protocolOp'] == 4: # searchResEntry
+ result = search_result_entry_response_to_dict_fast(ldap_message['payload'], self.connection.server.schema, self.connection.server.custom_formatter, self.connection.check_names)
+ result['type'] = 'searchResEntry'
+ elif ldap_message['protocolOp'] == 5: # searchResDone
+ result = ldap_result_to_dict_fast(ldap_message['payload'])
+ result['type'] = 'searchResDone'
+ elif ldap_message['protocolOp'] == 19: # searchResRef
+ result = search_result_reference_response_to_dict_fast(ldap_message['payload'])
+ result['type'] = 'searchResRef'
+ elif ldap_message['protocolOp'] == 7: # modifyResponse
+ result = ldap_result_to_dict_fast(ldap_message['payload'])
+ result['type'] = 'modifyResponse'
+ elif ldap_message['protocolOp'] == 9: # addResponse
+ result = ldap_result_to_dict_fast(ldap_message['payload'])
+ result['type'] = 'addResponse'
+ elif ldap_message['protocolOp'] == 11: # delResponse
+ result = ldap_result_to_dict_fast(ldap_message['payload'])
+ result['type'] = 'delResponse'
+ elif ldap_message['protocolOp'] == 13: # modDNResponse
+ result = ldap_result_to_dict_fast(ldap_message['payload'])
+ result['type'] = 'modDNResponse'
+ elif ldap_message['protocolOp'] == 15: # compareResponse
+ result = ldap_result_to_dict_fast(ldap_message['payload'])
+ result['type'] = 'compareResponse'
+ elif ldap_message['protocolOp'] == 24: # extendedResp
+ result = extended_response_to_dict_fast(ldap_message['payload'])
+ result['type'] = 'extendedResp'
+ elif ldap_message['protocolOp'] == 25: # intermediateResponse
+ result = intermediate_response_to_dict_fast(ldap_message['payload'])
+ result['type'] = 'intermediateResponse'
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'unknown response <%s> for <%s>', ldap_message['protocolOp'], self.connection)
+ raise LDAPUnknownResponseError('unknown response')
+ if ldap_message['controls']:
+ result['controls'] = dict()
+ for control in ldap_message['controls']:
+ decoded_control = self.decode_control_fast(control[3])
+ result['controls'][decoded_control[0]] = decoded_control[1]
+ return result
+
+ @staticmethod
+ def decode_control(control):
+ """
+ decode control, return a 2-element tuple where the first element is the control oid
+ and the second element is a dictionary with description (from Oids), criticality and decoded control value
+ """
+ control_type = str(control['controlType'])
+ criticality = bool(control['criticality'])
+ control_value = bytes(control['controlValue'])
+ unprocessed = None
+ if control_type == '1.2.840.113556.1.4.319': # simple paged search as per RFC2696
+ control_resp, unprocessed = decoder.decode(control_value, asn1Spec=RealSearchControlValue())
+ control_value = dict()
+ control_value['size'] = int(control_resp['size'])
+ control_value['cookie'] = bytes(control_resp['cookie'])
+ elif control_type == '1.2.840.113556.1.4.841': # DirSync AD
+ control_resp, unprocessed = decoder.decode(control_value, asn1Spec=DirSyncControlResponseValue())
+ control_value = dict()
+ control_value['more_results'] = bool(control_resp['MoreResults']) # more_results is True if nonzero
+ control_value['cookie'] = bytes(control_resp['CookieServer'])
+ elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2': # Pre-Read control, Post-Read Control as per RFC 4527
+ control_resp, unprocessed = decoder.decode(control_value, asn1Spec=SearchResultEntry())
+ control_value = dict()
+ control_value['result'] = attributes_to_dict(control_resp['attributes'])
+ if unprocessed:
+ if log_enabled(ERROR):
+ log(ERROR, 'unprocessed control response in substrate')
+ raise LDAPControlError('unprocessed control response in substrate')
+ return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value}
+
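The tuple returned here is what callers later read back from connection.result['controls']; for the RFC 2696 paged results control the usual pattern for continuing a paged search looks roughly like this (the OID is real, the surrounding connection is assumed from the earlier sketch):

    controls = conn.result['controls']                    # filled in after get_response()
    paged = controls['1.2.840.113556.1.4.319']['value']   # {'size': ..., 'cookie': ...}
    cookie = paged['cookie']                              # opaque cookie used to request the next page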
+ @staticmethod
+ def decode_control_fast(control, from_server=True):
+ """
+ decode control, return a 2-element tuple where the first element is the control oid
+ and the second element is a dictionary with description (from Oids), criticality and decoded control value
+ """
+ control_type = str(to_unicode(control[0][3], from_server=from_server))
+ criticality = False
+ control_value = None
+ for r in control[1:]:
+ if r[2] == 4: # controlValue
+ control_value = r[3]
+ else:
+ criticality = False if r[3] == 0 else True # criticality (boolean, defaults to False)
+ if control_type == '1.2.840.113556.1.4.319': # simple paged search as per RFC2696
+ control_resp = decode_sequence(control_value, 0, len(control_value))
+ control_value = dict()
+ control_value['size'] = int(control_resp[0][3][0][3])
+ control_value['cookie'] = bytes(control_resp[0][3][1][3])
+ elif control_type == '1.2.840.113556.1.4.841': # DirSync AD
+ control_resp = decode_sequence(control_value, 0, len(control_value))
+ control_value = dict()
+ control_value['more_results'] = True if control_resp[0][3][0][3] else False # more_results is True if nonzero
+ control_value['cookie'] = control_resp[0][3][2][3]
+ elif control_type == '1.3.6.1.1.13.1' or control_type == '1.3.6.1.1.13.2': # Pre-Read control, Post-Read Control as per RFC 4527
+ control_resp = decode_sequence(control_value, 0, len(control_value))
+ control_value = dict()
+ control_value['result'] = attributes_to_dict_fast(control_resp[0][3][1][3])
+ return control_type, {'description': Oids.get(control_type, ''), 'criticality': criticality, 'value': control_value}
+
+ @staticmethod
+ def decode_request(message_type, component, controls=None):
+ # message_type = ldap_message.getComponentByName('protocolOp').getName()
+ # component = ldap_message['protocolOp'].getComponent()
+ if message_type == 'bindRequest':
+ result = bind_request_to_dict(component)
+ elif message_type == 'unbindRequest':
+ result = dict()
+ elif message_type == 'addRequest':
+ result = add_request_to_dict(component)
+ elif message_type == 'compareRequest':
+ result = compare_request_to_dict(component)
+ elif message_type == 'delRequest':
+ result = delete_request_to_dict(component)
+ elif message_type == 'extendedReq':
+ result = extended_request_to_dict(component)
+ elif message_type == 'modifyRequest':
+ result = modify_request_to_dict(component)
+ elif message_type == 'modDNRequest':
+ result = modify_dn_request_to_dict(component)
+ elif message_type == 'searchRequest':
+ result = search_request_to_dict(component)
+ elif message_type == 'abandonRequest':
+ result = abandon_request_to_dict(component)
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'unknown request <%s>', message_type)
+ raise LDAPUnknownRequestError('unknown request')
+ result['type'] = message_type
+ result['controls'] = controls
+
+ return result
+
+ def valid_referral_list(self, referrals):
+ referral_list = []
+ for referral in referrals:
+ candidate_referral = parse_uri(referral)
+ if candidate_referral:
+ for ref_host in self.connection.server.allowed_referral_hosts:
+ if ref_host[0] == candidate_referral['host'] or ref_host[0] == '*':
+ if candidate_referral['host'] not in self._referrals:
+ candidate_referral['anonymousBindOnly'] = not ref_host[1]
+ referral_list.append(candidate_referral)
+ break
+
+ return referral_list
+
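allowed_referral_hosts on the Server object drives this filter: each entry is a (host, use_current_credentials) tuple and '*' acts as a wildcard, with the boolean deciding whether the referral connection may bind with the current credentials or must stay anonymous. A hedged configuration sketch with hypothetical hosts:

    from ldap3 import Server, Connection

    server = Server('ldap.example.com',
                    allowed_referral_hosts=[('ldap-replica.example.com', True),  # follow with current credentials
                                            ('*', False)])                       # any other host, anonymous bind only
    conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret', auto_referrals=True)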
+ def do_next_range_search(self, request, response, attr_name):
+ done = False
+ current_response = response
+ while not done:
+ attr_type, _, returned_range = attr_name.partition(';range=')
+ _, _, high_range = returned_range.partition('-')
+ response['raw_attributes'][attr_type] += current_response['raw_attributes'][attr_name]
+ response['attributes'][attr_type] += current_response['attributes'][attr_name]
+ if high_range != '*':
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'performing next search on auto-range <%s> via <%s>', str(int(high_range) + 1), self.connection)
+ requested_range = attr_type + ';range=' + str(int(high_range) + 1) + '-*'
+ result = self.connection.search(search_base=response['dn'],
+ search_filter='(objectclass=*)',
+ search_scope=BASE,
+ dereference_aliases=request['dereferenceAlias'],
+ attributes=[attr_type + ';range=' + str(int(high_range) + 1) + '-*'])
+ if isinstance(result, bool):
+ if result:
+ current_response = self.connection.response[0]
+ else:
+ done = True
+ else:
+ current_response, _ = self.get_response(result)
+ current_response = current_response[0]
+
+ if not done:
+ if requested_range in current_response['raw_attributes'] and len(current_response['raw_attributes'][requested_range]) == 0:
+ del current_response['raw_attributes'][requested_range]
+ del current_response['attributes'][requested_range]
+ attr_name = list(filter(lambda a: ';range=' in a, current_response['raw_attributes'].keys()))[0]
+ continue
+
+ done = True
+
+ def do_search_on_auto_range(self, request, response):
+ for resp in [r for r in response if r['type'] == 'searchResEntry']:
+ for attr_name in list(resp['raw_attributes'].keys()): # materialize the keys to avoid 'dictionary changed size during iteration' errors
+ if ';range=' in attr_name:
+ attr_type, _, range_values = attr_name.partition(';range=')
+ if range_values in ('1-1', '0-0'): # DirSync returns these values for adding and removing members
+ return False
+ if attr_type not in resp['raw_attributes'] or resp['raw_attributes'][attr_type] is None:
+ resp['raw_attributes'][attr_type] = list()
+ if attr_type not in resp['attributes'] or resp['attributes'][attr_type] is None:
+ resp['attributes'][attr_type] = list()
+ self.do_next_range_search(request, resp, attr_name)
+ return True
+
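The ';range=' handling above follows the Active Directory incremental retrieval convention: a large multi-valued attribute comes back in slices such as member;range=0-1499, and the client keeps asking for member;range=1500-* until the upper bound is '*'. With auto_range enabled the strategy merges the slices back into the plain attribute, so a caller only ever sees the complete list. A sketch with a hypothetical group:

    conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret',
                      auto_range=True, auto_bind=True)
    conn.search('dc=example,dc=com', '(cn=big-group)', attributes=['member'])
    members = conn.entries[0].member.values   # full member list, range slices already stitched together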
+ def create_referral_connection(self, referrals):
+ referral_connection = None
+ selected_referral = None
+ cachekey = None
+ valid_referral_list = self.valid_referral_list(referrals)
+ if valid_referral_list:
+ preferred_referral_list = [referral for referral in valid_referral_list if
+ referral['ssl'] == self.connection.server.ssl]
+ selected_referral = choice(preferred_referral_list) if preferred_referral_list else choice(
+ valid_referral_list)
+
+ cachekey = (selected_referral['host'], selected_referral['port'] or self.connection.server.port, selected_referral['ssl'])
+ if self.connection.use_referral_cache and cachekey in self.referral_cache:
+ referral_connection = self.referral_cache[cachekey]
+ else:
+ referral_server = Server(host=selected_referral['host'],
+ port=selected_referral['port'] or self.connection.server.port,
+ use_ssl=selected_referral['ssl'],
+ get_info=self.connection.server.get_info,
+ formatter=self.connection.server.custom_formatter,
+ connect_timeout=self.connection.server.connect_timeout,
+ mode=self.connection.server.mode,
+ allowed_referral_hosts=self.connection.server.allowed_referral_hosts,
+ tls=Tls(local_private_key_file=self.connection.server.tls.private_key_file,
+ local_certificate_file=self.connection.server.tls.certificate_file,
+ validate=self.connection.server.tls.validate,
+ version=self.connection.server.tls.version,
+ ca_certs_file=self.connection.server.tls.ca_certs_file) if
+ selected_referral['ssl'] else None)
+
+ from ..core.connection import Connection
+
+ referral_connection = Connection(server=referral_server,
+ user=self.connection.user if not selected_referral['anonymousBindOnly'] else None,
+ password=self.connection.password if not selected_referral['anonymousBindOnly'] else None,
+ version=self.connection.version,
+ authentication=self.connection.authentication if not selected_referral['anonymousBindOnly'] else ANONYMOUS,
+ client_strategy=SYNC,
+ auto_referrals=True,
+ read_only=self.connection.read_only,
+ check_names=self.connection.check_names,
+ raise_exceptions=self.connection.raise_exceptions,
+ fast_decoder=self.connection.fast_decoder,
+ receive_timeout=self.connection.receive_timeout,
+ sasl_mechanism=self.connection.sasl_mechanism,
+ sasl_credentials=self.connection.sasl_credentials)
+
+ if self.connection.usage:
+ self.connection._usage.referrals_connections += 1
+
+ referral_connection.open()
+ referral_connection.strategy._referrals = self._referrals
+ if self.connection.tls_started and not referral_server.ssl: # if the original server was in start_tls mode and the referral server is not in ssl then start_tls on the referral connection
+ referral_connection.start_tls()
+
+ if self.connection.bound:
+ referral_connection.bind()
+
+ if self.connection.usage:
+ self.connection._usage.referrals_followed += 1
+
+ return selected_referral, referral_connection, cachekey
+
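use_referral_cache keeps the referral connection open, indexed by the (host, port, ssl) triple, so later referrals to the same server reuse it instead of opening and unbinding a new connection each time; cached connections are released by unbind_referral_cache(). Enabling it is a single flag on the Connection (hypothetical credentials):

    conn = Connection(server, user='cn=admin,dc=example,dc=com', password='secret',
                      auto_referrals=True, use_referral_cache=True, auto_bind=True)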
+ def do_operation_on_referral(self, request, referrals):
+ if log_enabled(PROTOCOL):
+ log(PROTOCOL, 'following referral for <%s>', self.connection)
+ selected_referral, referral_connection, cachekey = self.create_referral_connection(referrals)
+ if selected_referral:
+ if request['type'] == 'searchRequest':
+ referral_connection.search(selected_referral['base'] or request['base'],
+ selected_referral['filter'] or request['filter'],
+ selected_referral['scope'] or request['scope'],
+ request['dereferenceAlias'],
+ selected_referral['attributes'] or request['attributes'],
+ request['sizeLimit'],
+ request['timeLimit'],
+ request['typesOnly'],
+ controls=request['controls'])
+ elif request['type'] == 'addRequest':
+ referral_connection.add(selected_referral['base'] or request['entry'],
+ None,
+ request['attributes'],
+ controls=request['controls'])
+ elif request['type'] == 'compareRequest':
+ referral_connection.compare(selected_referral['base'] or request['entry'],
+ request['attribute'],
+ request['value'],
+ controls=request['controls'])
+ elif request['type'] == 'delRequest':
+ referral_connection.delete(selected_referral['base'] or request['entry'],
+ controls=request['controls'])
+ elif request['type'] == 'extendedReq':
+ referral_connection.extended(request['name'],
+ request['value'],
+ controls=request['controls'],
+ no_encode=True
+ )
+ elif request['type'] == 'modifyRequest':
+ referral_connection.modify(selected_referral['base'] or request['entry'],
+ prepare_changes_for_request(request['changes']),
+ controls=request['controls'])
+ elif request['type'] == 'modDNRequest':
+ referral_connection.modify_dn(selected_referral['base'] or request['entry'],
+ request['newRdn'],
+ request['deleteOldRdn'],
+ request['newSuperior'],
+ controls=request['controls'])
+ else:
+ self.connection.last_error = 'referral operation not permitted'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPReferralError(self.connection.last_error)
+
+ response = referral_connection.response
+ result = referral_connection.result
+ if self.connection.use_referral_cache:
+ self.referral_cache[cachekey] = referral_connection
+ else:
+ referral_connection.unbind()
+ else:
+ response = None
+ result = None
+
+ return response, result
+
+ def sending(self, ldap_message):
+ if log_enabled(NETWORK):
+ log(NETWORK, 'sending 1 ldap message for <%s>', self.connection)
+ try:
+ encoded_message = encode(ldap_message)
+ self.connection.socket.sendall(encoded_message)
+ if log_enabled(EXTENDED):
+ log(EXTENDED, 'ldap message sent via <%s>:%s', self.connection, format_ldap_message(ldap_message, '>>'))
+ if log_enabled(NETWORK):
+ log(NETWORK, 'sent %d bytes via <%s>', len(encoded_message), self.connection)
+ except socket.error as e:
+ self.connection.last_error = 'socket sending error: ' + str(e)
+ encoded_message = None
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ # raise communication_exception_factory(LDAPSocketSendError, exc)(self.connection.last_error)
+ raise communication_exception_factory(LDAPSocketSendError, type(e)(str(e)))(self.connection.last_error)
+ if self.connection.usage:
+ self.connection._usage.update_transmitted_message(self.connection.request, len(encoded_message))
+
+ def _start_listen(self):
+ # overridden on strategy class
+ raise NotImplementedError
+
+ def _get_response(self, message_id, timeout):
+ # overridden in strategy class
+ raise NotImplementedError
+
+ def receiving(self):
+ # overridden in strategy class
+ raise NotImplementedError
+
+ def post_send_single_response(self, message_id):
+ # overridden in strategy class
+ raise NotImplementedError
+
+ def post_send_search(self, message_id):
+ # overridden in strategy class
+ raise NotImplementedError
+
+ def get_stream(self):
+ raise NotImplementedError
+
+ def set_stream(self, value):
+ raise NotImplementedError
+
+ def unbind_referral_cache(self):
+ while len(self.referral_cache) > 0:
+ cachekey, referral_connection = self.referral_cache.popitem()
+ referral_connection.unbind()
diff --git a/ldap3/strategy/ldifProducer.py b/ldap3/strategy/ldifProducer.py
index 119e172..392239e 100644
--- a/ldap3/strategy/ldifProducer.py
+++ b/ldap3/strategy/ldifProducer.py
@@ -1,148 +1,150 @@
-"""
-"""
-
-# Created on 2013.07.15
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2013 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from io import StringIO
-from os import linesep
-import random
-
-from ..core.exceptions import LDAPLDIFError
-from ..utils.conv import prepare_for_stream
-from ..protocol.rfc4511 import LDAPMessage, MessageID, ProtocolOp, LDAP_MAX_INT
-from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
-from ..protocol.convert import build_controls_list
-from .base import BaseStrategy
-
-
-class LdifProducerStrategy(BaseStrategy):
- """
- This strategy is used to create the LDIF stream for the Add, Delete, Modify, ModifyDn operations.
- You send the request and get the request in the ldif-change representation of the operation.
- NO OPERATION IS SENT TO THE LDAP SERVER!
- Connection.request will contain the result LDAP message in a dict form
- Connection.response will contain the ldif-change format of the requested operation if available
- You don't need a real server to connect to for this strategy
- """
-
- def __init__(self, ldap_connection):
- BaseStrategy.__init__(self, ldap_connection)
- self.sync = True
- self.no_real_dsa = True
- self.pooled = False
- self.can_stream = True
- self.line_separator = linesep
- self.all_base64 = False
- self.stream = None
- self.order = dict()
- self._header_added = False
- random.seed()
-
- def _open_socket(self, address, use_ssl=False, unix_socket=False): # fake open socket
- self.connection.socket = NotImplemented # placeholder for a dummy socket
- if self.connection.usage:
- self.connection._usage.open_sockets += 1
-
- self.connection.closed = False
-
- def _close_socket(self):
- if self.connection.usage:
- self.connection._usage.closed_sockets += 1
-
- self.connection.socket = None
- self.connection.closed = True
-
- def _start_listen(self):
- self.connection.listening = True
- self.connection.closed = False
- self._header_added = False
- if not self.stream or (isinstance(self.stream, StringIO) and self.stream.closed):
- self.set_stream(StringIO())
-
- def _stop_listen(self):
- self.stream.close()
- self.connection.listening = False
- self.connection.closed = True
-
- def receiving(self):
- return None
-
- def send(self, message_type, request, controls=None):
- """
- Build the LDAPMessage without sending to server
- """
- message_id = random.randint(0, LDAP_MAX_INT)
- ldap_message = LDAPMessage()
- ldap_message['messageID'] = MessageID(message_id)
- ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
- message_controls = build_controls_list(controls)
- if message_controls is not None:
- ldap_message['controls'] = message_controls
-
- self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
- self.connection.request['controls'] = controls
- self._outstanding[message_id] = self.connection.request
- return message_id
-
- def post_send_single_response(self, message_id):
- self.connection.response = None
- self.connection.result = None
- if self._outstanding and message_id in self._outstanding:
- request = self._outstanding.pop(message_id)
- ldif_lines = operation_to_ldif(self.connection.request['type'], request, self.all_base64, self.order.get(self.connection.request['type']))
- if self.stream and ldif_lines and not self.connection.closed:
- self.accumulate_stream(self.line_separator.join(ldif_lines))
- ldif_lines = add_ldif_header(ldif_lines)
- self.connection.response = self.line_separator.join(ldif_lines)
- return self.connection.response
-
- return None
-
- def post_send_search(self, message_id):
- raise LDAPLDIFError('LDIF-CONTENT cannot be produced for Search operations')
-
- def _get_response(self, message_id):
- pass
-
- def accumulate_stream(self, fragment):
- if not self._header_added and self.stream.tell() == 0:
- self._header_added = True
- header = add_ldif_header(['-'])[0]
- self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator))
- self.stream.write(prepare_for_stream(fragment + self.line_separator + self.line_separator))
-
- def get_stream(self):
- return self.stream
-
- def set_stream(self, value):
- error = False
- try:
- if not value.writable():
- error = True
- except (ValueError, AttributeError):
- error = True
-
- if error:
- raise LDAPLDIFError('stream must be writable')
-
- self.stream = value
+"""
+"""
+
+# Created on 2013.07.15
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2013 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from io import StringIO
+from os import linesep
+import random
+
+from ..core.exceptions import LDAPLDIFError
+from ..utils.conv import prepare_for_stream
+from ..protocol.rfc4511 import LDAPMessage, MessageID, ProtocolOp, LDAP_MAX_INT
+from ..protocol.rfc2849 import operation_to_ldif, add_ldif_header
+from ..protocol.convert import build_controls_list
+from .base import BaseStrategy
+
+
+class LdifProducerStrategy(BaseStrategy):
+ """
+ This strategy is used to create the LDIF stream for the Add, Delete, Modify, ModifyDn operations.
+ You send the request and get back the ldif-change representation of that operation.
+ NO OPERATION IS SENT TO THE LDAP SERVER!
+ Connection.request will contain the requested LDAP operation in dict form
+ Connection.response will contain the ldif-change representation of the requested operation, if available
+ You don't need a real server to connect to with this strategy
+ """
+
+ def __init__(self, ldap_connection):
+ BaseStrategy.__init__(self, ldap_connection)
+ self.sync = True
+ self.no_real_dsa = True
+ self.pooled = False
+ self.can_stream = True
+ self.line_separator = linesep
+ self.all_base64 = False
+ self.stream = None
+ self.order = dict()
+ self._header_added = False
+ random.seed()
+
+ def _open_socket(self, address, use_ssl=False, unix_socket=False): # fake open socket
+ self.connection.socket = NotImplemented # placeholder for a dummy socket
+ if self.connection.usage:
+ self.connection._usage.open_sockets += 1
+
+ self.connection.closed = False
+
+ def _close_socket(self):
+ if self.connection.usage:
+ self.connection._usage.closed_sockets += 1
+
+ self.connection.socket = None
+ self.connection.closed = True
+
+ def _start_listen(self):
+ self.connection.listening = True
+ self.connection.closed = False
+ self._header_added = False
+ if not self.stream or (isinstance(self.stream, StringIO) and self.stream.closed):
+ self.set_stream(StringIO())
+
+ def _stop_listen(self):
+ self.stream.close()
+ self.connection.listening = False
+ self.connection.closed = True
+
+ def receiving(self):
+ return None
+
+ def send(self, message_type, request, controls=None):
+ """
+ Build the LDAPMessage without sending to server
+ """
+ message_id = random.randint(0, LDAP_MAX_INT)
+ ldap_message = LDAPMessage()
+ ldap_message['messageID'] = MessageID(message_id)
+ ldap_message['protocolOp'] = ProtocolOp().setComponentByName(message_type, request)
+ message_controls = build_controls_list(controls)
+ if message_controls is not None:
+ ldap_message['controls'] = message_controls
+
+ self.connection.request = BaseStrategy.decode_request(message_type, request, controls)
+ self.connection.request['controls'] = controls
+ if self._outstanding is None:
+ self._outstanding = dict()
+ self._outstanding[message_id] = self.connection.request
+ return message_id
+
+ def post_send_single_response(self, message_id):
+ self.connection.response = None
+ self.connection.result = None
+ if self._outstanding and message_id in self._outstanding:
+ request = self._outstanding.pop(message_id)
+ ldif_lines = operation_to_ldif(self.connection.request['type'], request, self.all_base64, self.order.get(self.connection.request['type']))
+ if self.stream and ldif_lines and not self.connection.closed:
+ self.accumulate_stream(self.line_separator.join(ldif_lines))
+ ldif_lines = add_ldif_header(ldif_lines)
+ self.connection.response = self.line_separator.join(ldif_lines)
+ return self.connection.response
+
+ return None
+
+ def post_send_search(self, message_id):
+ raise LDAPLDIFError('LDIF-CONTENT cannot be produced for Search operations')
+
+ def _get_response(self, message_id, timeout):
+ pass
+
+ def accumulate_stream(self, fragment):
+ if not self._header_added and self.stream.tell() == 0:
+ self._header_added = True
+ header = add_ldif_header(['-'])[0]
+ self.stream.write(prepare_for_stream(header + self.line_separator + self.line_separator))
+ self.stream.write(prepare_for_stream(fragment + self.line_separator + self.line_separator))
+
+ def get_stream(self):
+ return self.stream
+
+ def set_stream(self, value):
+ error = False
+ try:
+ if not value.writable():
+ error = True
+ except (ValueError, AttributeError):
+ error = True
+
+ if error:
+ raise LDAPLDIFError('stream must be writable')
+
+ self.stream = value
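As the class docstring says, the LDIF producer never talks to a server: the operation is decoded into connection.request and its ldif-change rendering ends up in connection.response. A minimal usage sketch with a placeholder server name and a made-up entry:

    from ldap3 import Server, Connection, LDIF

    server = Server('fake_ldif_server')               # never contacted by this strategy
    conn = Connection(server, client_strategy=LDIF)
    conn.open()                                       # only prepares the internal stream
    conn.add('cn=user1,ou=people,dc=example,dc=com',
             attributes={'objectClass': ['inetOrgPerson'], 'sn': 'User', 'cn': 'user1'})
    print(conn.response)                              # ldif-change representation of the add operation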
diff --git a/ldap3/strategy/mockAsync.py b/ldap3/strategy/mockAsync.py
index 2891506..f9965dc 100644
--- a/ldap3/strategy/mockAsync.py
+++ b/ldap3/strategy/mockAsync.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/strategy/mockBase.py b/ldap3/strategy/mockBase.py
index 5fc041c..7acf706 100644
--- a/ldap3/strategy/mockBase.py
+++ b/ldap3/strategy/mockBase.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -26,7 +26,6 @@
import json
import re
-from threading import Lock
from random import SystemRandom
from pyasn1.type.univ import OctetString
@@ -54,6 +53,7 @@ from ..protocol.formatters.standard import find_attribute_validator, format_attr
from ..protocol.rfc2696 import paged_search_control
from ..utils.log import log, log_enabled, ERROR, BASIC
from ..utils.asn1 import encode
+from ..utils.conv import ldap_escape_to_bytes
from ..strategy.base import BaseStrategy # needed for decode_control() method
from ..protocol.rfc4511 import LDAPMessage, ProtocolOp, MessageID
from ..protocol.convert import build_controls_list
@@ -167,7 +167,7 @@ class MockBaseStrategy(object):
self.bound = None
self.custom_validators = None
self.operational_attributes = ['entryDN']
- self.add_entry('cn=schema', []) # add default entry for schema
+ self.add_entry('cn=schema', [], validate=False) # add default entry for schema
self._paged_sets = [] # list of paged search in progress
if log_enabled(BASIC):
log(BASIC, 'instantiated <%s>: <%s>', self.__class__.__name__, self)
@@ -184,27 +184,32 @@ class MockBaseStrategy(object):
if self.connection.usage:
self.connection._usage.closed_sockets += 1
- def _prepare_value(self, attribute_type, value):
+ def _prepare_value(self, attribute_type, value, validate=True):
"""
Prepare a value for being stored in the mock DIT
:param value: object to store
:return: raw value to store in the DIT
"""
- validator = find_attribute_validator(self.connection.server.schema, attribute_type, self.custom_validators)
- validated = validator(value)
- if validated is False:
- raise LDAPInvalidValueError('value \'%s\' non valid for attribute \'%s\'' % (value, attribute_type))
- elif validated is not True: # a valid LDAP value equivalent to the actual value
- value = validated
+ if validate: # when loading from a json dump the values are not re-validated
+ validator = find_attribute_validator(self.connection.server.schema, attribute_type, self.custom_validators)
+ validated = validator(value)
+ if validated is False:
+ raise LDAPInvalidValueError('value not valid for attribute \'%s\'' % attribute_type)
+ elif validated is not True: # a valid LDAP value equivalent to the actual value
+ value = validated
raw_value = to_raw(value)
if not isinstance(raw_value, bytes):
- raise LDAPInvalidValueError('added values must be bytes if no offline schema is provided in Mock strategies')
+ raise LDAPInvalidValueError('The value "%s" of type %s for "%s" must be bytes or an offline schema needs to be provided when Mock strategy is used.' % (
+ value,
+ type(value),
+ attribute_type,
+ ))
return raw_value
def _update_attribute(self, dn, attribute_type, value):
pass
- def add_entry(self, dn, attributes):
+ def add_entry(self, dn, attributes, validate=True):
with self.connection.server.dit_lock:
escaped_dn = safe_dn(dn)
if escaped_dn not in self.connection.server.dit:
@@ -218,7 +223,7 @@ class MockBaseStrategy(object):
return False
if attribute.lower() == 'objectclass' and self.connection.server.schema: # builds the objectClass hierarchy only if schema is present
class_set = set()
- for object_class in attributes['objectClass']:
+ for object_class in attributes[attribute]:
if self.connection.server.schema.object_classes and object_class not in self.connection.server.schema.object_classes:
return False
# walks up the class hierarchy and builds a set of all classes in it
@@ -233,7 +238,7 @@ class MockBaseStrategy(object):
class_set.update(new_classes)
new_entry['objectClass'] = [to_raw(value) for value in class_set]
else:
- new_entry[attribute] = [self._prepare_value(attribute, value) for value in attributes[attribute]]
+ new_entry[attribute] = [self._prepare_value(attribute, value, validate) for value in attributes[attribute]]
for rdn in safe_rdn(escaped_dn, decompose=True): # adds rdns to entry attributes
if rdn[0] not in new_entry: # if rdn attribute is missing adds attribute and its value
new_entry[rdn[0]] = [to_raw(rdn[1])]
@@ -275,7 +280,7 @@ class MockBaseStrategy(object):
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
raise LDAPDefinitionError(self.connection.last_error)
- self.add_entry(entry['dn'], entry['raw'])
+ self.add_entry(entry['dn'], entry['raw'], validate=False)
target.close()
def mock_bind(self, request_message, controls):
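The validate=False path introduced in the hunk above is used when the mock DIT is populated from an existing JSON dump, whose values were already validated when the dump was produced. A hedged sketch of that flow with a made-up dump file name:

    from ldap3 import Server, Connection, MOCK_SYNC

    server = Server('my_fake_server')                    # never contacted
    conn = Connection(server, client_strategy=MOCK_SYNC)
    conn.strategy.entries_from_json('dit_dump.json')     # loads entries without re-validating values
    conn.bind()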
@@ -648,20 +653,21 @@ class MockBaseStrategy(object):
if '+' in attributes: # operational attributes requested
attributes.extend(self.operational_attributes)
attributes.remove('+')
+
attributes = [attr.lower() for attr in request['attributes']]
- filter_root = parse_filter(request['filter'], self.connection.server.schema, auto_escape=True, auto_encode=False, check_names=self.connection.check_names)
+ filter_root = parse_filter(request['filter'], self.connection.server.schema, auto_escape=True, auto_encode=False, validator=self.connection.server.custom_validator, check_names=self.connection.check_names)
candidates = []
if scope == 0: # base object
if base in self.connection.server.dit or base.lower() == 'cn=schema':
candidates.append(base)
elif scope == 1: # single level
for entry in self.connection.server.dit:
- if entry.endswith(base) and ',' not in entry[:-len(base) - 1]: # only leafs without commas in the remaining dn
+ if entry.lower().endswith(base.lower()) and ',' not in entry[:-len(base) - 1]: # only leafs without commas in the remaining dn
candidates.append(entry)
elif scope == 2: # whole subtree
for entry in self.connection.server.dit:
- if entry.endswith(base):
+ if entry.lower().endswith(base.lower()):
candidates.append(entry)
if not candidates: # incorrect base
@@ -669,17 +675,25 @@ class MockBaseStrategy(object):
message = 'incorrect base object'
else:
matched = self.evaluate_filter_node(filter_root, candidates)
- for match in matched:
- responses.append({
- 'object': match,
- 'attributes': [{'type': attribute,
- 'vals': [] if request['typesOnly'] else self.connection.server.dit[match][attribute]}
- for attribute in self.connection.server.dit[match]
- if attribute.lower() in attributes or ALL_ATTRIBUTES in attributes]
- })
-
- result_code = 0
- message = ''
+ if self.connection.raise_exceptions and 0 < request['sizeLimit'] < len(matched):
+ result_code = 4
+ message = 'size limit exceeded'
+ else:
+ for match in matched:
+ responses.append({
+ 'object': match,
+ 'attributes': [{'type': attribute,
+ 'vals': [] if request['typesOnly'] else self.connection.server.dit[match][attribute]}
+ for attribute in self.connection.server.dit[match]
+ if attribute.lower() in attributes or ALL_ATTRIBUTES in attributes]
+ })
+ if '+' not in attributes: # remove operational attributes
+ for op_attr in self.operational_attributes:
+ for i, attr in enumerate(responses[len(responses)-1]['attributes']):
+ if attr['type'] == op_attr:
+ del responses[len(responses)-1]['attributes'][i]
+ result_code = 0
+ message = ''
result = {'resultCode': result_code,
'matchedDN': '',
@@ -714,12 +728,12 @@ class MockBaseStrategy(object):
if extension[0] == '2.16.840.1.113719.1.27.100.31': # getBindDNRequest [NOVELL]
result_code = 0
message = ''
- response_name = '2.16.840.1.113719.1.27.100.32' # getBindDNResponse [NOVELL]
+ response_name = OctetString('2.16.840.1.113719.1.27.100.32') # getBindDNResponse [NOVELL]
response_value = OctetString(self.bound)
elif extension[0] == '1.3.6.1.4.1.4203.1.11.3': # WhoAmI [RFC4532]
result_code = 0
message = ''
- response_name = '1.3.6.1.4.1.4203.1.11.3' # WhoAmI [RFC4532]
+ response_name = OctetString('1.3.6.1.4.1.4203.1.11.3') # WhoAmI [RFC4532]
response_value = OctetString(self.bound)
break
@@ -835,42 +849,28 @@ class MockBaseStrategy(object):
attr_name = node.assertion['attr']
attr_value = node.assertion['value']
for candidate in candidates:
- # if attr_name in self.connection.server.dit[candidate] and attr_value in self.connection.server.dit[candidate][attr_name]:
if attr_name in self.connection.server.dit[candidate] and self.equal(candidate, attr_name, attr_value):
node.matched.add(candidate)
- # elif attr_name in self.connection.server.dit[candidate]: # tries to apply formatters
- # formatted_values = format_attribute_values(self.connection.server.schema, attr_name, self.connection.server.dit[candidate][attr_name], None)
- # if not isinstance(formatted_values, SEQUENCE_TYPES):
- # formatted_values = [formatted_values]
- # # if attr_value.decode(SERVER_ENCODING) in formatted_values: # attributes values should be returned in utf-8
- # if self.equal(attr_name, attr_value.decode(SERVER_ENCODING), formatted_values): # attributes values should be returned in utf-8
- # node.matched.add(candidate)
- # else:
- # node.unmatched.add(candidate)
else:
node.unmatched.add(candidate)
- def equal(self, dn, attribute, value):
+ def equal(self, dn, attribute_type, value_to_check):
# value is the value to match
- attribute_values = self.connection.server.dit[dn][attribute]
+ attribute_values = self.connection.server.dit[dn][attribute_type]
if not isinstance(attribute_values, SEQUENCE_TYPES):
attribute_values = [attribute_values]
+ escaped_value_to_check = ldap_escape_to_bytes(value_to_check)
for attribute_value in attribute_values:
- if self._check_equality(value, attribute_value):
+ if self._check_equality(escaped_value_to_check, attribute_value):
return True
-
- # if not found tries to apply formatters
- formatted_values = format_attribute_values(self.connection.server.schema, attribute, attribute_values, None)
- if not isinstance(formatted_values, SEQUENCE_TYPES):
- formatted_values = [formatted_values]
- for attribute_value in formatted_values:
- if self._check_equality(value, attribute_value):
+ if self._check_equality(self._prepare_value(attribute_type, value_to_check), attribute_value):
return True
-
return False
@staticmethod
def _check_equality(value1, value2):
+ if value1 == value2: # exact matching
+ return True
if str(value1).isdigit() and str(value2).isdigit():
if int(value1) == int(value2): # int comparison
return True
diff --git a/ldap3/strategy/mockSync.py b/ldap3/strategy/mockSync.py
index b155781..efd2c15 100644
--- a/ldap3/strategy/mockSync.py
+++ b/ldap3/strategy/mockSync.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/strategy/restartable.py b/ldap3/strategy/restartable.py
index 68c77ec..d739f41 100644
--- a/ldap3/strategy/restartable.py
+++ b/ldap3/strategy/restartable.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -23,10 +23,8 @@
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
-from sys import exc_info
from time import sleep
import socket
-from datetime import datetime
from .. import get_config_parameter
from .sync import SyncStrategy
@@ -68,7 +66,7 @@ class RestartableStrategy(SyncStrategy):
except Exception as e: # machinery for restartable connection
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
- self._add_exception_to_history()
+ self._add_exception_to_history(type(e)(str(e)))
if not self._restarting: # if not already performing a restart
self._restarting = True
@@ -82,8 +80,10 @@ class RestartableStrategy(SyncStrategy):
self.connection.unbind()
except (socket.error, LDAPSocketOpenError): # don't trace catch socket errors because socket could already be closed
pass
- except Exception:
- self._add_exception_to_history()
+ except Exception as e:
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> while restarting <%s>', e, self.connection)
+ self._add_exception_to_history(type(e)(str(e)))
try: # reissuing same operation
if self.connection.server_pool:
new_server = self.connection.server_pool.get_server(self.connection) # get a server from the server_pool if available
@@ -101,7 +101,7 @@ class RestartableStrategy(SyncStrategy):
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
- self._add_exception_to_history()
+ self._add_exception_to_history(type(e)(str(e)))
if self.connection.usage:
self.connection._usage.restartable_failures += 1
if not isinstance(self.restartable_tries, bool):
@@ -128,7 +128,7 @@ class RestartableStrategy(SyncStrategy):
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
- self._add_exception_to_history()
+ self._add_exception_to_history(type(e)(str(e)))
if not self._restarting: # machinery for restartable connection
self._restarting = True
counter = self.restartable_tries
@@ -144,7 +144,7 @@ class RestartableStrategy(SyncStrategy):
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
- self._add_exception_to_history()
+ self._add_exception_to_history(type(e)(str(e)))
failure = False
try: # reopening connection
self.connection.open(reset_usage=False, read_server_info=False)
@@ -159,7 +159,7 @@ class RestartableStrategy(SyncStrategy):
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
- self._add_exception_to_history()
+ self._add_exception_to_history(type(e)(str(e)))
failure = True
if not failure:
@@ -173,7 +173,7 @@ class RestartableStrategy(SyncStrategy):
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
- self._add_exception_to_history()
+ self._add_exception_to_history(type(e)(str(e)))
failure = True
if failure and self.connection.usage:
@@ -197,7 +197,7 @@ class RestartableStrategy(SyncStrategy):
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
- self._add_exception_to_history()
+ self._add_exception_to_history(type(e)(str(e)))
# if an LDAPExceptionError is raised then resend the request
try:
@@ -207,15 +207,12 @@ class RestartableStrategy(SyncStrategy):
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
- self._add_exception_to_history()
- exc = e
-
- if exc:
- if not isinstance(exc, LDAPOperationResult):
+ self._add_exception_to_history(type(e)(str(e)))
+ if not isinstance(e, LDAPOperationResult):
self.connection.last_error = 'restartable connection strategy failed in post_send_single_response'
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise exc
+ raise
def post_send_search(self, message_id):
try:
@@ -225,7 +222,7 @@ class RestartableStrategy(SyncStrategy):
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
- self._add_exception_to_history()
+ self._add_exception_to_history(type(e)(str(e)))
# if an LDAPExceptionError is raised then resend the request
try:
@@ -235,20 +232,17 @@ class RestartableStrategy(SyncStrategy):
except Exception as e:
if log_enabled(ERROR):
log(ERROR, '<%s> while restarting <%s>', e, self.connection)
- self._add_exception_to_history()
- exc = e
-
- if exc:
- if not isinstance(exc, LDAPOperationResult):
- self.connection.last_error = exc.args
+ self._add_exception_to_history(type(e)(str(e)))
+ if not isinstance(e, LDAPOperationResult):
+ self.connection.last_error = e.args
if log_enabled(ERROR):
log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise exc
+ raise e
- def _add_exception_to_history(self):
+ def _add_exception_to_history(self, exc):
if not isinstance(self.restartable_tries, bool): # doesn't accumulate when restarting forever
- if not isinstance(exc_info()[1], LDAPMaximumRetriesError): # doesn't add the LDAPMaximumRetriesError exception
- self.exception_history.append((datetime.now(), exc_info()[0], exc_info()[1]))
+ if not isinstance(exc, LDAPMaximumRetriesError): # doesn't add the LDAPMaximumRetriesError exception
+ self.exception_history.append(exc)
def _reset_exception_history(self):
if self.exception_history:
diff --git a/ldap3/strategy/reusable.py b/ldap3/strategy/reusable.py
index dce11d0..01bd9d3 100644
--- a/ldap3/strategy/reusable.py
+++ b/ldap3/strategy/reusable.py
@@ -1,479 +1,495 @@
-"""
-"""
-
-# Created on 2014.03.23
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from datetime import datetime
-from os import linesep
-from threading import Thread, Lock
-from time import sleep
-
-from .. import RESTARTABLE, get_config_parameter, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_TLS_BEFORE_BIND
-from .base import BaseStrategy
-from ..core.usage import ConnectionUsage
-from ..core.exceptions import LDAPConnectionPoolNameIsMandatoryError, LDAPConnectionPoolNotStartedError, LDAPOperationResult, LDAPExceptionError, LDAPResponseTimeoutError
-from ..utils.log import log, log_enabled, ERROR, BASIC
-from ..protocol.rfc4511 import LDAP_MAX_INT
-
-TERMINATE_REUSABLE = 'TERMINATE_REUSABLE_CONNECTION'
-
-BOGUS_BIND = -1
-BOGUS_UNBIND = -2
-BOGUS_EXTENDED = -3
-BOGUS_ABANDON = -4
-
-try:
- from queue import Queue, Empty
-except ImportError: # Python 2
- # noinspection PyUnresolvedReferences
- from Queue import Queue, Empty
-
-
-# noinspection PyProtectedMember
-class ReusableStrategy(BaseStrategy):
- """
- A pool of reusable SyncWaitRestartable connections with lazy behaviour and limited lifetime.
- The connection using this strategy presents itself as a normal connection, but internally the strategy has a pool of
- connections that can be used as needed. Each connection lives in its own thread and has a busy/available status.
- The strategy performs the requested operation on the first available connection.
- The pool of connections is instantiated at strategy initialization.
- Strategy has two customizable properties, the total number of connections in the pool and the lifetime of each connection.
- When lifetime is expired the connection is closed and will be open again when needed.
- """
-
- def receiving(self):
- raise NotImplementedError
-
- def _start_listen(self):
- raise NotImplementedError
-
- def _get_response(self, message_id):
- raise NotImplementedError
-
- def get_stream(self):
- raise NotImplementedError
-
- def set_stream(self, value):
- raise NotImplementedError
-
- pools = dict()
-
- # noinspection PyProtectedMember
- class ConnectionPool(object):
- """
- Container for the Connection Threads
- """
- def __new__(cls, connection):
- if connection.pool_name in ReusableStrategy.pools: # returns existing connection pool
- pool = ReusableStrategy.pools[connection.pool_name]
- if not pool.started: # if pool is not started remove it from the pools singleton and create a new onw
- del ReusableStrategy.pools[connection.pool_name]
- return object.__new__(cls)
- if connection.pool_keepalive and pool.keepalive != connection.pool_keepalive: # change lifetime
- pool.keepalive = connection.pool_keepalive
- if connection.pool_lifetime and pool.lifetime != connection.pool_lifetime: # change keepalive
- pool.lifetime = connection.pool_lifetime
- if connection.pool_size and pool.pool_size != connection.pool_size: # if pool size has changed terminate and recreate the connections
- pool.terminate_pool()
- pool.pool_size = connection.pool_size
- return pool
- else:
- return object.__new__(cls)
-
- def __init__(self, connection):
- if not hasattr(self, 'workers'):
- self.name = connection.pool_name
- self.master_connection = connection
- self.workers = []
- self.pool_size = connection.pool_size or get_config_parameter('REUSABLE_THREADED_POOL_SIZE')
- self.lifetime = connection.pool_lifetime or get_config_parameter('REUSABLE_THREADED_LIFETIME')
- self.keepalive = connection.pool_keepalive
- self.request_queue = Queue()
- self.open_pool = False
- self.bind_pool = False
- self.tls_pool = False
- self._incoming = dict()
- self.counter = 0
- self.terminated_usage = ConnectionUsage() if connection._usage else None
- self.terminated = False
- self.pool_lock = Lock()
- ReusableStrategy.pools[self.name] = self
- self.started = False
- if log_enabled(BASIC):
- log(BASIC, 'instantiated ConnectionPool: <%r>', self)
-
- def __str__(self):
- s = 'POOL: ' + str(self.name) + ' - status: ' + ('started' if self.started else 'terminated')
- s += ' - responses in queue: ' + str(len(self._incoming))
- s += ' - pool size: ' + str(self.pool_size)
- s += ' - lifetime: ' + str(self.lifetime)
- s += ' - keepalive: ' + str(self.keepalive)
- s += ' - open: ' + str(self.open_pool)
- s += ' - bind: ' + str(self.bind_pool)
- s += ' - tls: ' + str(self.tls_pool) + linesep
- s += 'MASTER CONN: ' + str(self.master_connection) + linesep
- s += 'WORKERS:'
- if self.workers:
- for i, worker in enumerate(self.workers):
- s += linesep + str(i).rjust(5) + ': ' + str(worker)
- else:
- s += linesep + ' no active workers in pool'
-
- return s
-
- def __repr__(self):
- return self.__str__()
-
- def get_info_from_server(self):
- for worker in self.workers:
- with worker.worker_lock:
- if not worker.connection.server.schema or not worker.connection.server.info:
- worker.get_info_from_server = True
- else:
- worker.get_info_from_server = False
-
- def rebind_pool(self):
- for worker in self.workers:
- with worker.worker_lock:
- worker.connection.rebind(self.master_connection.user,
- self.master_connection.password,
- self.master_connection.authentication,
- self.master_connection.sasl_mechanism,
- self.master_connection.sasl_credentials)
-
- def start_pool(self):
- if not self.started:
- self.create_pool()
- for worker in self.workers:
- with worker.worker_lock:
- worker.thread.start()
- self.started = True
- self.terminated = False
- if log_enabled(BASIC):
- log(BASIC, 'worker started for pool <%s>', self)
- return True
- return False
-
- def create_pool(self):
- if log_enabled(BASIC):
- log(BASIC, 'created pool <%s>', self)
- self.workers = [ReusableStrategy.PooledConnectionWorker(self.master_connection, self.request_queue) for _ in range(self.pool_size)]
-
- def terminate_pool(self):
- if not self.terminated:
- if log_enabled(BASIC):
- log(BASIC, 'terminating pool <%s>', self)
- self.started = False
- self.request_queue.join() # waits for all queue pending operations
- for _ in range(len([worker for worker in self.workers if worker.thread.is_alive()])): # put a TERMINATE signal on the queue for each active thread
- self.request_queue.put((TERMINATE_REUSABLE, None, None, None))
- self.request_queue.join() # waits for all queue terminate operations
- self.terminated = True
- if log_enabled(BASIC):
- log(BASIC, 'pool terminated for <%s>', self)
-
- class PooledConnectionThread(Thread):
- """
- The thread that holds the Reusable connection and receive operation request via the queue
- Result are sent back in the pool._incoming list when ready
- """
- def __init__(self, worker, master_connection):
- Thread.__init__(self)
- self.daemon = True
- self.worker = worker
- self.master_connection = master_connection
- if log_enabled(BASIC):
- log(BASIC, 'instantiated PooledConnectionThread: <%r>', self)
-
- # noinspection PyProtectedMember
- def run(self):
- self.worker.running = True
- terminate = False
- pool = self.master_connection.strategy.pool
- while not terminate:
- try:
- counter, message_type, request, controls = pool.request_queue.get(block=True, timeout=self.master_connection.strategy.pool.keepalive)
- except Empty: # issue an Abandon(0) operation to keep the connection live - Abandon(0) is a harmless operation
- if not self.worker.connection.closed:
- self.worker.connection.abandon(0)
- continue
-
- with self.worker.worker_lock:
- self.worker.busy = True
- if counter == TERMINATE_REUSABLE:
- terminate = True
- if self.worker.connection.bound:
- try:
- self.worker.connection.unbind()
- if log_enabled(BASIC):
- log(BASIC, 'thread terminated')
- except LDAPExceptionError:
- pass
- else:
- if (datetime.now() - self.worker.creation_time).seconds >= self.master_connection.strategy.pool.lifetime: # destroy and create a new connection
- try:
- self.worker.connection.unbind()
- except LDAPExceptionError:
- pass
- self.worker.new_connection()
- if log_enabled(BASIC):
- log(BASIC, 'thread respawn')
- if message_type not in ['bindRequest', 'unbindRequest']:
- if pool.open_pool and self.worker.connection.closed:
- self.worker.connection.open(read_server_info=False)
- if pool.tls_pool and not self.worker.connection.tls_started:
- self.worker.connection.start_tls(read_server_info=False)
- if pool.bind_pool and not self.worker.connection.bound:
- self.worker.connection.bind(read_server_info=False)
- elif pool.open_pool and not self.worker.connection.closed: # connection already open, issues a start_tls
- if pool.tls_pool and not self.worker.connection.tls_started:
- self.worker.connection.start_tls(read_server_info=False)
- if self.worker.get_info_from_server and counter:
- self.worker.connection._fire_deferred()
- self.worker.get_info_from_server = False
- exc = None
- response = None
- result = None
- try:
- if message_type == 'searchRequest':
- response = self.worker.connection.post_send_search(self.worker.connection.send(message_type, request, controls))
- else:
- response = self.worker.connection.post_send_single_response(self.worker.connection.send(message_type, request, controls))
- result = self.worker.connection.result
- except LDAPOperationResult as e: # raise_exceptions has raised an exception. It must be redirected to the original connection thread
- exc = e
- with pool.pool_lock:
- if exc:
- pool._incoming[counter] = (exc, None, None)
- else:
- pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls))
-
- self.worker.busy = False
- pool.request_queue.task_done()
- self.worker.task_counter += 1
- if log_enabled(BASIC):
- log(BASIC, 'thread terminated')
- if self.master_connection.usage:
- pool.terminated_usage += self.worker.connection.usage
- self.worker.running = False
-
- class PooledConnectionWorker(object):
- """
- Container for the restartable connection. it includes a thread and a lock to execute the connection in the pool
- """
- def __init__(self, connection, request_queue):
- self.master_connection = connection
- self.request_queue = request_queue
- self.running = False
- self.busy = False
- self.get_info_from_server = False
- self.connection = None
- self.creation_time = None
- self.new_connection()
- self.task_counter = 0
- self.thread = ReusableStrategy.PooledConnectionThread(self, self.master_connection)
- self.worker_lock = Lock()
- if log_enabled(BASIC):
- log(BASIC, 'instantiated PooledConnectionWorker: <%s>', self)
-
- def __str__(self):
- s = 'CONN: ' + str(self.connection) + linesep + ' THREAD: '
- s += 'running' if self.running else 'halted'
- s += ' - ' + ('busy' if self.busy else 'available')
- s += ' - ' + ('created at: ' + self.creation_time.isoformat())
- s += ' - time to live: ' + str(self.master_connection.strategy.pool.lifetime - (datetime.now() - self.creation_time).seconds)
- s += ' - requests served: ' + str(self.task_counter)
-
- return s
-
- def new_connection(self):
- from ..core.connection import Connection
- # noinspection PyProtectedMember
- self.connection = Connection(server=self.master_connection.server_pool if self.master_connection.server_pool else self.master_connection.server,
- user=self.master_connection.user,
- password=self.master_connection.password,
- auto_bind=AUTO_BIND_NONE, # do not perform auto_bind because it reads again the schema
- version=self.master_connection.version,
- authentication=self.master_connection.authentication,
- client_strategy=RESTARTABLE,
- auto_referrals=self.master_connection.auto_referrals,
- auto_range=self.master_connection.auto_range,
- sasl_mechanism=self.master_connection.sasl_mechanism,
- sasl_credentials=self.master_connection.sasl_credentials,
- check_names=self.master_connection.check_names,
- collect_usage=self.master_connection._usage,
- read_only=self.master_connection.read_only,
- raise_exceptions=self.master_connection.raise_exceptions,
- lazy=False,
- fast_decoder=self.master_connection.fast_decoder,
- receive_timeout=self.master_connection.receive_timeout,
- return_empty_attributes=self.master_connection.empty_attributes)
-
- # simulates auto_bind, always with read_server_info=False
- if self.master_connection.auto_bind and self.master_connection.auto_bind != AUTO_BIND_NONE:
- if log_enabled(BASIC):
- log(BASIC, 'performing automatic bind for <%s>', self.connection)
- self.connection.open(read_server_info=False)
- if self.master_connection.auto_bind == AUTO_BIND_NO_TLS:
- self.connection.bind(read_server_info=False)
- elif self.master_connection.auto_bind == AUTO_BIND_TLS_BEFORE_BIND:
- self.connection.start_tls(read_server_info=False)
- self.connection.bind(read_server_info=False)
- elif self.master_connection.auto_bind == AUTO_BIND_TLS_AFTER_BIND:
- self.connection.bind(read_server_info=False)
- self.connection.start_tls(read_server_info=False)
-
- if self.master_connection.server_pool:
- self.connection.server_pool = self.master_connection.server_pool
- self.connection.server_pool.initialize(self.connection)
-
- self.creation_time = datetime.now()
-
- # ReusableStrategy methods
- def __init__(self, ldap_connection):
- BaseStrategy.__init__(self, ldap_connection)
- self.sync = False
- self.no_real_dsa = False
- self.pooled = True
- self.can_stream = False
- if hasattr(ldap_connection, 'pool_name') and ldap_connection.pool_name:
- self.pool = ReusableStrategy.ConnectionPool(ldap_connection)
- else:
- if log_enabled(ERROR):
- log(ERROR, 'reusable connection must have a pool_name')
- raise LDAPConnectionPoolNameIsMandatoryError('reusable connection must have a pool_name')
-
- def open(self, reset_usage=True, read_server_info=True):
- # read_server_info not used
- self.pool.open_pool = True
- self.pool.start_pool()
- self.connection.closed = False
- if self.connection.usage:
- if reset_usage or not self.connection._usage.initial_connection_start_time:
- self.connection._usage.start()
-
- def terminate(self):
- self.pool.terminate_pool()
- self.pool.open_pool = False
- self.connection.bound = False
- self.connection.closed = True
- self.pool.bind_pool = False
- self.pool.tls_pool = False
-
- def _close_socket(self):
- """
- Doesn't really close the socket
- """
- self.connection.closed = True
-
- if self.connection.usage:
- self.connection._usage.closed_sockets += 1
-
- def send(self, message_type, request, controls=None):
- if self.pool.started:
- if message_type == 'bindRequest':
- self.pool.bind_pool = True
- counter = BOGUS_BIND
- elif message_type == 'unbindRequest':
- self.pool.bind_pool = False
- counter = BOGUS_UNBIND
- elif message_type == 'abandonRequest':
- counter = BOGUS_ABANDON
- elif message_type == 'extendedReq' and self.connection.starting_tls:
- self.pool.tls_pool = True
- counter = BOGUS_EXTENDED
- else:
- with self.pool.pool_lock:
- self.pool.counter += 1
- if self.pool.counter > LDAP_MAX_INT:
- self.pool.counter = 1
- counter = self.pool.counter
- self.pool.request_queue.put((counter, message_type, request, controls))
- return counter
- if log_enabled(ERROR):
- log(ERROR, 'reusable connection pool not started')
- raise LDAPConnectionPoolNotStartedError('reusable connection pool not started')
-
- def validate_bind(self, controls):
- temp_connection = self.pool.workers[0].connection
- temp_connection.lazy = False
- if not self.connection.server.schema or not self.connection.server.info:
- result = self.pool.workers[0].connection.bind(controls=controls)
- else:
- result = self.pool.workers[0].connection.bind(controls=controls, read_server_info=False)
-
- temp_connection.unbind()
- temp_connection.lazy = True
- if result:
- self.pool.bind_pool = True # bind pool if bind is validated
- return result
-
- def get_response(self, counter, timeout=None, get_request=False):
- sleeptime = get_config_parameter('RESPONSE_SLEEPTIME')
- request=None
- if timeout is None:
- timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT')
- if counter == BOGUS_BIND: # send a bogus bindResponse
- response = list()
- result = {'description': 'success', 'referrals': None, 'type': 'bindResponse', 'result': 0, 'dn': '', 'message': '<bogus Bind response>', 'saslCreds': None}
- elif counter == BOGUS_UNBIND: # bogus unbind response
- response = None
- result = None
- elif counter == BOGUS_ABANDON: # abandon cannot be executed because of multiple connections
- response = list()
- result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': '<bogus StartTls response>'}
- elif counter == BOGUS_EXTENDED: # bogus startTls extended response
- response = list()
- result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': '<bogus StartTls response>'}
- self.connection.starting_tls = False
- else:
- response = None
- result = None
- while timeout >= 0: # waiting for completed message to appear in _incoming
- try:
- with self.connection.strategy.pool.pool_lock:
- response, result, request = self.connection.strategy.pool._incoming.pop(counter)
- except KeyError:
- sleep(sleeptime)
- timeout -= sleeptime
- continue
- break
-
- if timeout <= 0:
- if log_enabled(ERROR):
- log(ERROR, 'no response from worker threads in Reusable connection')
- raise LDAPResponseTimeoutError('no response from worker threads in Reusable connection')
-
- if isinstance(response, LDAPOperationResult):
- raise response # an exception has been raised with raise_exceptions
-
- if get_request:
- return response, result, request
-
- return response, result
-
- def post_send_single_response(self, counter):
- return counter
-
- def post_send_search(self, counter):
- return counter
+"""
+"""
+
+# Created on 2014.03.23
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from datetime import datetime
+from os import linesep
+from threading import Thread, Lock
+from time import sleep
+
+from .. import RESTARTABLE, get_config_parameter, AUTO_BIND_DEFAULT, AUTO_BIND_NONE, AUTO_BIND_NO_TLS, AUTO_BIND_TLS_AFTER_BIND, AUTO_BIND_TLS_BEFORE_BIND
+from .base import BaseStrategy
+from ..core.usage import ConnectionUsage
+from ..core.exceptions import LDAPConnectionPoolNameIsMandatoryError, LDAPConnectionPoolNotStartedError, LDAPOperationResult, LDAPExceptionError, LDAPResponseTimeoutError
+from ..utils.log import log, log_enabled, ERROR, BASIC
+from ..protocol.rfc4511 import LDAP_MAX_INT
+
+TERMINATE_REUSABLE = 'TERMINATE_REUSABLE_CONNECTION'
+
+BOGUS_BIND = -1
+BOGUS_UNBIND = -2
+BOGUS_EXTENDED = -3
+BOGUS_ABANDON = -4
+
+try:
+ from queue import Queue, Empty
+except ImportError: # Python 2
+ # noinspection PyUnresolvedReferences
+ from Queue import Queue, Empty
+
+
+# noinspection PyProtectedMember
+class ReusableStrategy(BaseStrategy):
+ """
+ A pool of reusable SyncWaitRestartable connections with lazy behaviour and limited lifetime.
+ The connection using this strategy presents itself as a normal connection, but internally the strategy has a pool of
+ connections that can be used as needed. Each connection lives in its own thread and has a busy/available status.
+ The strategy performs the requested operation on the first available connection.
+ The pool of connections is instantiated at strategy initialization.
+ The strategy has two customizable properties: the total number of connections in the pool and the lifetime of each connection.
+ When the lifetime expires the connection is closed and will be opened again when needed.
+ """
+ pools = dict()
+
+ def receiving(self):
+ raise NotImplementedError
+
+ def _start_listen(self):
+ raise NotImplementedError
+
+ def _get_response(self, message_id, timeout):
+ raise NotImplementedError
+
+ def get_stream(self):
+ raise NotImplementedError
+
+ def set_stream(self, value):
+ raise NotImplementedError
+
+ # noinspection PyProtectedMember
+ class ConnectionPool(object):
+ """
+ Container for the Connection Threads
+ """
+ def __new__(cls, connection):
+ if connection.pool_name in ReusableStrategy.pools: # returns existing connection pool
+ pool = ReusableStrategy.pools[connection.pool_name]
+ if not pool.started: # if pool is not started remove it from the pools singleton and create a new one
+ del ReusableStrategy.pools[connection.pool_name]
+ return object.__new__(cls)
+ if connection.pool_keepalive and pool.keepalive != connection.pool_keepalive: # change keepalive
+ pool.keepalive = connection.pool_keepalive
+ if connection.pool_lifetime and pool.lifetime != connection.pool_lifetime: # change lifetime
+ pool.lifetime = connection.pool_lifetime
+ if connection.pool_size and pool.pool_size != connection.pool_size: # if pool size has changed terminate and recreate the connections
+ pool.terminate_pool()
+ pool.pool_size = connection.pool_size
+ return pool
+ else:
+ return object.__new__(cls)
+
+ def __init__(self, connection):
+ if not hasattr(self, 'workers'):
+ self.name = connection.pool_name
+ self.master_connection = connection
+ self.workers = []
+ self.pool_size = connection.pool_size or get_config_parameter('REUSABLE_THREADED_POOL_SIZE')
+ self.lifetime = connection.pool_lifetime or get_config_parameter('REUSABLE_THREADED_LIFETIME')
+ self.keepalive = connection.pool_keepalive
+ self.request_queue = Queue()
+ self.open_pool = False
+ self.bind_pool = False
+ self.tls_pool = False
+ self._incoming = dict()
+ self.counter = 0
+ self.terminated_usage = ConnectionUsage() if connection._usage else None
+ self.terminated = False
+ self.pool_lock = Lock()
+ ReusableStrategy.pools[self.name] = self
+ self.started = False
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated ConnectionPool: <%r>', self)
+
+ def __str__(self):
+ s = 'POOL: ' + str(self.name) + ' - status: ' + ('started' if self.started else 'terminated')
+ s += ' - responses in queue: ' + str(len(self._incoming))
+ s += ' - pool size: ' + str(self.pool_size)
+ s += ' - lifetime: ' + str(self.lifetime)
+ s += ' - keepalive: ' + str(self.keepalive)
+ s += ' - open: ' + str(self.open_pool)
+ s += ' - bind: ' + str(self.bind_pool)
+ s += ' - tls: ' + str(self.tls_pool) + linesep
+ s += 'MASTER CONN: ' + str(self.master_connection) + linesep
+ s += 'WORKERS:'
+ if self.workers:
+ for i, worker in enumerate(self.workers):
+ s += linesep + str(i).rjust(5) + ': ' + str(worker)
+ else:
+ s += linesep + ' no active workers in pool'
+
+ return s
+
+ def __repr__(self):
+ return self.__str__()
+
+ def get_info_from_server(self):
+ for worker in self.workers:
+ with worker.worker_lock:
+ if not worker.connection.server.schema or not worker.connection.server.info:
+ worker.get_info_from_server = True
+ else:
+ worker.get_info_from_server = False
+
+ def rebind_pool(self):
+ for worker in self.workers:
+ with worker.worker_lock:
+ worker.connection.rebind(self.master_connection.user,
+ self.master_connection.password,
+ self.master_connection.authentication,
+ self.master_connection.sasl_mechanism,
+ self.master_connection.sasl_credentials)
+
+ def start_pool(self):
+ if not self.started:
+ self.create_pool()
+ for worker in self.workers:
+ with worker.worker_lock:
+ worker.thread.start()
+ self.started = True
+ self.terminated = False
+ if log_enabled(BASIC):
+ log(BASIC, 'worker started for pool <%s>', self)
+ return True
+ return False
+
+ def create_pool(self):
+ if log_enabled(BASIC):
+ log(BASIC, 'created pool <%s>', self)
+ self.workers = [ReusableStrategy.PooledConnectionWorker(self.master_connection, self.request_queue) for _ in range(self.pool_size)]
+
+ def terminate_pool(self):
+ if not self.terminated:
+ if log_enabled(BASIC):
+ log(BASIC, 'terminating pool <%s>', self)
+ self.started = False
+ self.request_queue.join() # waits for all queue pending operations
+ for _ in range(len([worker for worker in self.workers if worker.thread.is_alive()])): # put a TERMINATE signal on the queue for each active thread
+ self.request_queue.put((TERMINATE_REUSABLE, None, None, None))
+ self.request_queue.join() # waits for all queue terminate operations
+ self.terminated = True
+ if log_enabled(BASIC):
+ log(BASIC, 'pool terminated for <%s>', self)
+
+ class PooledConnectionThread(Thread):
+ """
+ The thread that holds the Reusable connection and receives operation requests via the queue
+ Results are sent back in the pool._incoming dict when ready
+ """
+ def __init__(self, worker, master_connection):
+ Thread.__init__(self)
+ self.daemon = True
+ self.worker = worker
+ self.master_connection = master_connection
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated PooledConnectionThread: <%r>', self)
+
+ # noinspection PyProtectedMember
+ def run(self):
+ self.worker.running = True
+ terminate = False
+ pool = self.master_connection.strategy.pool
+ while not terminate:
+ try:
+ counter, message_type, request, controls = pool.request_queue.get(block=True, timeout=self.master_connection.strategy.pool.keepalive)
+ except Empty: # issue an Abandon(0) operation to keep the connection alive - Abandon(0) is a harmless operation
+ if not self.worker.connection.closed:
+ self.worker.connection.abandon(0)
+ continue
+
+ with self.worker.worker_lock:
+ self.worker.busy = True
+ if counter == TERMINATE_REUSABLE:
+ terminate = True
+ if self.worker.connection.bound:
+ try:
+ self.worker.connection.unbind()
+ if log_enabled(BASIC):
+ log(BASIC, 'thread terminated')
+ except LDAPExceptionError:
+ pass
+ else:
+ if (datetime.now() - self.worker.creation_time).seconds >= self.master_connection.strategy.pool.lifetime: # destroy and create a new connection
+ try:
+ self.worker.connection.unbind()
+ except LDAPExceptionError:
+ pass
+ self.worker.new_connection()
+ if log_enabled(BASIC):
+ log(BASIC, 'thread respawn')
+ if message_type not in ['bindRequest', 'unbindRequest']:
+ try:
+ if pool.open_pool and self.worker.connection.closed:
+ self.worker.connection.open(read_server_info=False)
+ if pool.tls_pool and not self.worker.connection.tls_started:
+ self.worker.connection.start_tls(read_server_info=False)
+ if pool.bind_pool and not self.worker.connection.bound:
+ self.worker.connection.bind(read_server_info=False)
+ elif pool.open_pool and not self.worker.connection.closed: # connection already open, issues a start_tls
+ if pool.tls_pool and not self.worker.connection.tls_started:
+ self.worker.connection.start_tls(read_server_info=False)
+ if self.worker.get_info_from_server and counter:
+ self.worker.connection.refresh_server_info()
+ self.worker.get_info_from_server = False
+ response = None
+ result = None
+ if message_type == 'searchRequest':
+ response = self.worker.connection.post_send_search(self.worker.connection.send(message_type, request, controls))
+ else:
+ response = self.worker.connection.post_send_single_response(self.worker.connection.send(message_type, request, controls))
+ result = self.worker.connection.result
+ with pool.pool_lock:
+ pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls))
+ except LDAPOperationResult as e: # raise_exceptions has raised an exception. It must be redirected to the original connection thread
+ with pool.pool_lock:
+ pool._incoming[counter] = (e, None, None)
+ # pool._incoming[counter] = (type(e)(str(e)), None, None)
+ # except LDAPOperationResult as e: # raise_exceptions has raised an exception. It must be redirected to the original connection thread
+ # exc = e
+ # with pool.pool_lock:
+ # if exc:
+ # pool._incoming[counter] = (exc, None, None)
+ # else:
+ # pool._incoming[counter] = (response, result, BaseStrategy.decode_request(message_type, request, controls))
+
+ self.worker.busy = False
+ pool.request_queue.task_done()
+ self.worker.task_counter += 1
+ if log_enabled(BASIC):
+ log(BASIC, 'thread terminated')
+ if self.master_connection.usage:
+ pool.terminated_usage += self.worker.connection.usage
+ self.worker.running = False
+
+ class PooledConnectionWorker(object):
+ """
+ Container for the restartable connection. It includes a thread and a lock to execute the connection in the pool
+ """
+ def __init__(self, connection, request_queue):
+ self.master_connection = connection
+ self.request_queue = request_queue
+ self.running = False
+ self.busy = False
+ self.get_info_from_server = False
+ self.connection = None
+ self.creation_time = None
+ self.task_counter = 0
+ self.new_connection()
+ self.thread = ReusableStrategy.PooledConnectionThread(self, self.master_connection)
+ self.worker_lock = Lock()
+ if log_enabled(BASIC):
+ log(BASIC, 'instantiated PooledConnectionWorker: <%s>', self)
+
+ def __str__(self):
+ s = 'CONN: ' + str(self.connection) + linesep + ' THREAD: '
+ s += 'running' if self.running else 'halted'
+ s += ' - ' + ('busy' if self.busy else 'available')
+ s += ' - ' + ('created at: ' + self.creation_time.isoformat())
+ s += ' - time to live: ' + str(self.master_connection.strategy.pool.lifetime - (datetime.now() - self.creation_time).seconds)
+ s += ' - requests served: ' + str(self.task_counter)
+
+ return s
+
+ def new_connection(self):
+ from ..core.connection import Connection
+ # noinspection PyProtectedMember
+ self.creation_time = datetime.now()
+ self.connection = Connection(server=self.master_connection.server_pool if self.master_connection.server_pool else self.master_connection.server,
+ user=self.master_connection.user,
+ password=self.master_connection.password,
+ auto_bind=AUTO_BIND_NONE, # do not perform auto_bind because it reads again the schema
+ version=self.master_connection.version,
+ authentication=self.master_connection.authentication,
+ client_strategy=RESTARTABLE,
+ auto_referrals=self.master_connection.auto_referrals,
+ auto_range=self.master_connection.auto_range,
+ sasl_mechanism=self.master_connection.sasl_mechanism,
+ sasl_credentials=self.master_connection.sasl_credentials,
+ check_names=self.master_connection.check_names,
+ collect_usage=self.master_connection._usage,
+ read_only=self.master_connection.read_only,
+ raise_exceptions=self.master_connection.raise_exceptions,
+ lazy=False,
+ fast_decoder=self.master_connection.fast_decoder,
+ receive_timeout=self.master_connection.receive_timeout,
+ return_empty_attributes=self.master_connection.empty_attributes)
+
+ # simulates auto_bind, always with read_server_info=False
+ if self.master_connection.auto_bind and self.master_connection.auto_bind not in [AUTO_BIND_NONE, AUTO_BIND_DEFAULT]:
+ if log_enabled(BASIC):
+ log(BASIC, 'performing automatic bind for <%s>', self.connection)
+ self.connection.open(read_server_info=False)
+ if self.master_connection.auto_bind == AUTO_BIND_NO_TLS:
+ self.connection.bind(read_server_info=False)
+ elif self.master_connection.auto_bind == AUTO_BIND_TLS_BEFORE_BIND:
+ self.connection.start_tls(read_server_info=False)
+ self.connection.bind(read_server_info=False)
+ elif self.master_connection.auto_bind == AUTO_BIND_TLS_AFTER_BIND:
+ self.connection.bind(read_server_info=False)
+ self.connection.start_tls(read_server_info=False)
+
+ if self.master_connection.server_pool:
+ self.connection.server_pool = self.master_connection.server_pool
+ self.connection.server_pool.initialize(self.connection)
+
+ # ReusableStrategy methods
+ def __init__(self, ldap_connection):
+ BaseStrategy.__init__(self, ldap_connection)
+ self.sync = False
+ self.no_real_dsa = False
+ self.pooled = True
+ self.can_stream = False
+ if hasattr(ldap_connection, 'pool_name') and ldap_connection.pool_name:
+ self.pool = ReusableStrategy.ConnectionPool(ldap_connection)
+ else:
+ if log_enabled(ERROR):
+ log(ERROR, 'reusable connection must have a pool_name')
+ raise LDAPConnectionPoolNameIsMandatoryError('reusable connection must have a pool_name')
+
+ def open(self, reset_usage=True, read_server_info=True):
+ # read_server_info not used
+ self.pool.open_pool = True
+ self.pool.start_pool()
+ self.connection.closed = False
+ if self.connection.usage:
+ if reset_usage or not self.connection._usage.initial_connection_start_time:
+ self.connection._usage.start()
+
+ def terminate(self):
+ self.pool.terminate_pool()
+ self.pool.open_pool = False
+ self.connection.bound = False
+ self.connection.closed = True
+ self.pool.bind_pool = False
+ self.pool.tls_pool = False
+
+ def _close_socket(self):
+ """
+ Doesn't really close the socket
+ """
+ self.connection.closed = True
+
+ if self.connection.usage:
+ self.connection._usage.closed_sockets += 1
+
+ def send(self, message_type, request, controls=None):
+ if self.pool.started:
+ if message_type == 'bindRequest':
+ self.pool.bind_pool = True
+ counter = BOGUS_BIND
+ elif message_type == 'unbindRequest':
+ self.pool.bind_pool = False
+ counter = BOGUS_UNBIND
+ elif message_type == 'abandonRequest':
+ counter = BOGUS_ABANDON
+ elif message_type == 'extendedReq' and self.connection.starting_tls:
+ self.pool.tls_pool = True
+ counter = BOGUS_EXTENDED
+ else:
+ with self.pool.pool_lock:
+ self.pool.counter += 1
+ if self.pool.counter > LDAP_MAX_INT:
+ self.pool.counter = 1
+ counter = self.pool.counter
+ self.pool.request_queue.put((counter, message_type, request, controls))
+ return counter
+ if log_enabled(ERROR):
+ log(ERROR, 'reusable connection pool not started')
+ raise LDAPConnectionPoolNotStartedError('reusable connection pool not started')
+
+ def validate_bind(self, controls):
+ # in case of a new connection or different credentials
+ if (self.connection.user != self.pool.master_connection.user or
+ self.connection.password != self.pool.master_connection.password or
+ self.connection.authentication != self.pool.master_connection.authentication or
+ self.connection.sasl_mechanism != self.pool.master_connection.sasl_mechanism or
+ self.connection.sasl_credentials != self.pool.master_connection.sasl_credentials):
+ self.pool.master_connection.user = self.connection.user
+ self.pool.master_connection.password = self.connection.password
+ self.pool.master_connection.authentication = self.connection.authentication
+ self.pool.master_connection.sasl_mechanism = self.connection.sasl_mechanism
+ self.pool.master_connection.sasl_credentials = self.connection.sasl_credentials
+ self.pool.rebind_pool()
+ temp_connection = self.pool.workers[0].connection
+ old_lazy = temp_connection.lazy
+ temp_connection.lazy = False
+ if not self.connection.server.schema or not self.connection.server.info:
+ result = self.pool.workers[0].connection.bind(controls=controls)
+ else:
+ result = self.pool.workers[0].connection.bind(controls=controls, read_server_info=False)
+
+ temp_connection.unbind()
+ temp_connection.lazy = old_lazy
+ if result:
+ self.pool.bind_pool = True # bind pool if bind is validated
+ return result
+
+ def get_response(self, counter, timeout=None, get_request=False):
+ sleeptime = get_config_parameter('RESPONSE_SLEEPTIME')
+ request = None
+ if timeout is None:
+ timeout = get_config_parameter('RESPONSE_WAITING_TIMEOUT')
+ if counter == BOGUS_BIND: # send a bogus bindResponse
+ response = list()
+ result = {'description': 'success', 'referrals': None, 'type': 'bindResponse', 'result': 0, 'dn': '', 'message': '<bogus Bind response>', 'saslCreds': None}
+ elif counter == BOGUS_UNBIND: # bogus unbind response
+ response = None
+ result = None
+ elif counter == BOGUS_ABANDON: # abandon cannot be executed because of multiple connections
+ response = list()
+ result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': '<bogus StartTls response>'}
+ elif counter == BOGUS_EXTENDED: # bogus startTls extended response
+ response = list()
+ result = {'result': 0, 'referrals': None, 'responseName': '1.3.6.1.4.1.1466.20037', 'type': 'extendedResp', 'description': 'success', 'responseValue': 'None', 'dn': '', 'message': '<bogus StartTls response>'}
+ self.connection.starting_tls = False
+ else:
+ response = None
+ result = None
+ while timeout >= 0: # waiting for completed message to appear in _incoming
+ try:
+ with self.connection.strategy.pool.pool_lock:
+ response, result, request = self.connection.strategy.pool._incoming.pop(counter)
+ except KeyError:
+ sleep(sleeptime)
+ timeout -= sleeptime
+ continue
+ break
+
+ if timeout <= 0:
+ if log_enabled(ERROR):
+ log(ERROR, 'no response from worker threads in Reusable connection')
+ raise LDAPResponseTimeoutError('no response from worker threads in Reusable connection')
+
+ if isinstance(response, LDAPOperationResult):
+ raise response # an exception has been raised with raise_exceptions
+
+ if get_request:
+ return response, result, request
+
+ return response, result
+
+ def post_send_single_response(self, counter):
+ return counter
+
+ def post_send_search(self, counter):
+ return counter
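The rewritten ReusableStrategy above is driven entirely through the public Connection API. A minimal usage sketch follows; the server address, credentials and pool settings are illustrative assumptions, not values taken from this commit. With a pooled strategy the operation call returns a counter and the result is collected with get_response(), mirroring the send()/get_response() pair shown above.

from ldap3 import Server, Connection, REUSABLE

# hypothetical server and credentials, for illustration only
server = Server('ldap.example.com')
conn = Connection(server,
                  user='cn=admin,dc=example,dc=com',
                  password='secret',
                  client_strategy=REUSABLE,
                  pool_name='demo_pool',   # mandatory: ReusableStrategy raises without a pool_name
                  pool_size=5,             # number of worker connections/threads
                  pool_lifetime=3600,      # seconds before a worker connection is recreated
                  pool_keepalive=10)       # idle seconds before a keepalive Abandon(0) is sent
conn.open()                                # starts the pool
conn.bind()                                # marks the pool as bound; workers bind lazily
msg_id = conn.search('dc=example,dc=com', '(objectClass=*)', attributes=['cn'])
response, result = conn.get_response(msg_id)   # waits for a worker to post the result in _incoming
conn.unbind()                              # terminates the pool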
diff --git a/ldap3/strategy/sync.py b/ldap3/strategy/sync.py
index e1fb043..fdb1441 100644
--- a/ldap3/strategy/sync.py
+++ b/ldap3/strategy/sync.py
@@ -1,215 +1,212 @@
-"""
-"""
-
-# Created on 2013.07.15
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2013 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-import socket
-
-from .. import SEQUENCE_TYPES, get_config_parameter
-from ..core.exceptions import LDAPSocketReceiveError, communication_exception_factory, LDAPExceptionError, LDAPExtensionError, LDAPOperationResult
-from ..strategy.base import BaseStrategy, SESSION_TERMINATED_BY_SERVER, RESPONSE_COMPLETE, TRANSACTION_ERROR
-from ..protocol.rfc4511 import LDAPMessage
-from ..utils.log import log, log_enabled, ERROR, NETWORK, EXTENDED, format_ldap_message
-from ..utils.asn1 import decoder, decode_message_fast
-
-LDAP_MESSAGE_TEMPLATE = LDAPMessage()
-
-
-# noinspection PyProtectedMember
-class SyncStrategy(BaseStrategy):
- """
- This strategy is synchronous. You send the request and get the response
- Requests return a boolean value to indicate the result of the requested Operation
- Connection.response will contain the whole LDAP response for the messageId requested in a dict form
- Connection.request will contain the result LDAP message in a dict form
- """
-
- def __init__(self, ldap_connection):
- BaseStrategy.__init__(self, ldap_connection)
- self.sync = True
- self.no_real_dsa = False
- self.pooled = False
- self.can_stream = False
- self.socket_size = get_config_parameter('SOCKET_SIZE')
-
- def open(self, reset_usage=True, read_server_info=True):
- BaseStrategy.open(self, reset_usage, read_server_info)
- if read_server_info:
- try:
- self.connection.refresh_server_info()
- except LDAPOperationResult: # catch errors from server if raise_exception = True
- self.connection.server._dsa_info = None
- self.connection.server._schema_info = None
-
- def _start_listen(self):
- if not self.connection.listening and not self.connection.closed:
- self.connection.listening = True
-
- def receiving(self):
- """
- Receive data over the socket
- Checks if the socket is closed
- """
- messages = []
- receiving = True
- unprocessed = b''
- data = b''
- get_more_data = True
- exc = None
- while receiving:
- if get_more_data:
- try:
- data = self.connection.socket.recv(self.socket_size)
- except (OSError, socket.error, AttributeError) as e:
- self.connection.last_error = 'error receiving data: ' + str(e)
- exc = e
-
- if exc:
- try: # try to close the connection before raising exception
- self.close()
- except (socket.error, LDAPExceptionError):
- pass
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise communication_exception_factory(LDAPSocketReceiveError, exc)(self.connection.last_error)
-
- unprocessed += data
- if len(data) > 0:
- length = BaseStrategy.compute_ldap_message_size(unprocessed)
- if length == -1: # too few data to decode message length
- get_more_data = True
- continue
- if len(unprocessed) < length:
- get_more_data = True
- else:
- if log_enabled(NETWORK):
- log(NETWORK, 'received %d bytes via <%s>', len(unprocessed[:length]), self.connection)
- messages.append(unprocessed[:length])
- unprocessed = unprocessed[length:]
- get_more_data = False
- if len(unprocessed) == 0:
- receiving = False
- else:
- receiving = False
-
- if log_enabled(NETWORK):
- log(NETWORK, 'received %d ldap messages via <%s>', len(messages), self.connection)
- return messages
-
- def post_send_single_response(self, message_id):
- """
- Executed after an Operation Request (except Search)
- Returns the result message or None
- """
- responses, result = self.get_response(message_id)
- self.connection.result = result
- if result['type'] == 'intermediateResponse': # checks that all responses are intermediates (there should be only one)
- for response in responses:
- if response['type'] != 'intermediateResponse':
- self.connection.last_error = 'multiple messages received error'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPSocketReceiveError(self.connection.last_error)
-
- responses.append(result)
- return responses
-
- def post_send_search(self, message_id):
- """
- Executed after a search request
- Returns the result message and store in connection.response the objects found
- """
- responses, result = self.get_response(message_id)
- self.connection.result = result
- if isinstance(responses, SEQUENCE_TYPES):
- self.connection.response = responses[:] # copy search result entries
- return responses
-
- self.connection.last_error = 'error receiving response'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPSocketReceiveError(self.connection.last_error)
-
- def _get_response(self, message_id):
- """
- Performs the capture of LDAP response for SyncStrategy
- """
- ldap_responses = []
- response_complete = False
- while not response_complete:
- responses = self.receiving()
- if responses:
- for response in responses:
- if len(response) > 0:
- if self.connection.usage:
- self.connection._usage.update_received_message(len(response))
- if self.connection.fast_decoder:
- ldap_resp = decode_message_fast(response)
- dict_response = self.decode_response_fast(ldap_resp)
- else:
- ldap_resp, _ = decoder.decode(response, asn1Spec=LDAP_MESSAGE_TEMPLATE) # unprocessed unused because receiving() waits for the whole message
- dict_response = self.decode_response(ldap_resp)
- if log_enabled(EXTENDED):
- log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<'))
- if int(ldap_resp['messageID']) == message_id:
- ldap_responses.append(dict_response)
- if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']:
- response_complete = True
- elif int(ldap_resp['messageID']) == 0: # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4)
- if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036': # Notice of Disconnection as per RFC4511 (paragraph 4.4.1)
- return SESSION_TERMINATED_BY_SERVER
- elif dict_response['responseName'] == '2.16.840.1.113719.1.27.103.4': # Novell LDAP transaction error unsolicited notification
- return TRANSACTION_ERROR
- else:
- self.connection.last_error = 'unknown unsolicited notification from server'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPSocketReceiveError(self.connection.last_error)
- elif int(ldap_resp['messageID']) != message_id and dict_response['type'] == 'extendedResp':
- self.connection.last_error = 'multiple extended responses to a single extended request'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPExtensionError(self.connection.last_error)
- # pass # ignore message with invalid messageId when receiving multiple extendedResp. This is not allowed by RFC4511 but some LDAP server do it
- else:
- self.connection.last_error = 'invalid messageId received'
- if log_enabled(ERROR):
- log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- raise LDAPSocketReceiveError(self.connection.last_error)
- # response = unprocessed
- # if response: # if this statement is removed unprocessed data will be processed as another message
- # self.connection.last_error = 'unprocessed substrate error'
- # if log_enabled(ERROR):
- # log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
- # raise LDAPSocketReceiveError(self.connection.last_error)
- else:
- return SESSION_TERMINATED_BY_SERVER
- ldap_responses.append(RESPONSE_COMPLETE)
-
- return ldap_responses
-
- def set_stream(self, value):
- raise NotImplementedError
-
- def get_stream(self):
- raise NotImplementedError
+"""
+"""
+
+# Created on 2013.07.15
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2013 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+import socket
+
+from .. import SEQUENCE_TYPES, get_config_parameter
+from ..core.exceptions import LDAPSocketReceiveError, communication_exception_factory, LDAPExceptionError, LDAPExtensionError, LDAPOperationResult
+from ..strategy.base import BaseStrategy, SESSION_TERMINATED_BY_SERVER, RESPONSE_COMPLETE, TRANSACTION_ERROR
+from ..protocol.rfc4511 import LDAPMessage
+from ..utils.log import log, log_enabled, ERROR, NETWORK, EXTENDED, format_ldap_message
+from ..utils.asn1 import decoder, decode_message_fast
+
+LDAP_MESSAGE_TEMPLATE = LDAPMessage()
+
+
+# noinspection PyProtectedMember
+class SyncStrategy(BaseStrategy):
+ """
+ This strategy is synchronous. You send the request and get the response
+ Requests return a boolean value to indicate the result of the requested Operation
+ Connection.response will contain the whole LDAP response for the messageId requested in a dict form
+ Connection.request will contain the LDAP request message in a dict form
+ """
+
+ def __init__(self, ldap_connection):
+ BaseStrategy.__init__(self, ldap_connection)
+ self.sync = True
+ self.no_real_dsa = False
+ self.pooled = False
+ self.can_stream = False
+ self.socket_size = get_config_parameter('SOCKET_SIZE')
+
+ def open(self, reset_usage=True, read_server_info=True):
+ BaseStrategy.open(self, reset_usage, read_server_info)
+ if read_server_info:
+ try:
+ self.connection.refresh_server_info()
+ except LDAPOperationResult: # catch errors from server if raise_exceptions = True
+ self.connection.server._dsa_info = None
+ self.connection.server._schema_info = None
+
+ def _start_listen(self):
+ if not self.connection.listening and not self.connection.closed:
+ self.connection.listening = True
+
+ def receiving(self):
+ """
+ Receives data over the socket
+ Checks if the socket is closed
+ """
+ messages = []
+ receiving = True
+ unprocessed = b''
+ data = b''
+ get_more_data = True
+ exc = None
+ while receiving:
+ if get_more_data:
+ try:
+ data = self.connection.socket.recv(self.socket_size)
+ except (OSError, socket.error, AttributeError) as e:
+ self.connection.last_error = 'error receiving data: ' + str(e)
+ try: # try to close the connection before raising exception
+ self.close()
+ except (socket.error, LDAPExceptionError):
+ pass
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ # raise communication_exception_factory(LDAPSocketReceiveError, exc)(self.connection.last_error)
+ raise communication_exception_factory(LDAPSocketReceiveError, type(e)(str(e)))(self.connection.last_error)
+ unprocessed += data
+ if len(data) > 0:
+ length = BaseStrategy.compute_ldap_message_size(unprocessed)
+ if length == -1: # not enough data to decode the message length
+ get_more_data = True
+ continue
+ if len(unprocessed) < length:
+ get_more_data = True
+ else:
+ if log_enabled(NETWORK):
+ log(NETWORK, 'received %d bytes via <%s>', len(unprocessed[:length]), self.connection)
+ messages.append(unprocessed[:length])
+ unprocessed = unprocessed[length:]
+ get_more_data = False
+ if len(unprocessed) == 0:
+ receiving = False
+ else:
+ receiving = False
+
+ if log_enabled(NETWORK):
+ log(NETWORK, 'received %d ldap messages via <%s>', len(messages), self.connection)
+ return messages
+
+ def post_send_single_response(self, message_id):
+ """
+ Executed after an Operation Request (except Search)
+ Returns the result message or None
+ """
+ responses, result = self.get_response(message_id)
+ self.connection.result = result
+ if result['type'] == 'intermediateResponse': # checks that all responses are intermediates (there should be only one)
+ for response in responses:
+ if response['type'] != 'intermediateResponse':
+ self.connection.last_error = 'multiple messages received error'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPSocketReceiveError(self.connection.last_error)
+
+ responses.append(result)
+ return responses
+
+ def post_send_search(self, message_id):
+ """
+ Executed after a search request
+ Returns the result message and stores the entries found in connection.response
+ """
+ responses, result = self.get_response(message_id)
+ self.connection.result = result
+ if isinstance(responses, SEQUENCE_TYPES):
+ self.connection.response = responses[:] # copy search result entries
+ return responses
+
+ self.connection.last_error = 'error receiving response'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPSocketReceiveError(self.connection.last_error)
+
+ def _get_response(self, message_id, timeout):
+ """
+ Performs the capture of LDAP response for SyncStrategy
+ """
+ ldap_responses = []
+ response_complete = False
+ while not response_complete:
+ responses = self.receiving()
+ if responses:
+ for response in responses:
+ if len(response) > 0:
+ if self.connection.usage:
+ self.connection._usage.update_received_message(len(response))
+ if self.connection.fast_decoder:
+ ldap_resp = decode_message_fast(response)
+ dict_response = self.decode_response_fast(ldap_resp)
+ else:
+ ldap_resp, _ = decoder.decode(response, asn1Spec=LDAP_MESSAGE_TEMPLATE) # unprocessed unused because receiving() waits for the whole message
+ dict_response = self.decode_response(ldap_resp)
+ if log_enabled(EXTENDED):
+ log(EXTENDED, 'ldap message received via <%s>:%s', self.connection, format_ldap_message(ldap_resp, '<<'))
+ if int(ldap_resp['messageID']) == message_id:
+ ldap_responses.append(dict_response)
+ if dict_response['type'] not in ['searchResEntry', 'searchResRef', 'intermediateResponse']:
+ response_complete = True
+ elif int(ldap_resp['messageID']) == 0: # 0 is reserved for 'Unsolicited Notification' from server as per RFC4511 (paragraph 4.4)
+ if dict_response['responseName'] == '1.3.6.1.4.1.1466.20036': # Notice of Disconnection as per RFC4511 (paragraph 4.4.1)
+ return SESSION_TERMINATED_BY_SERVER
+ elif dict_response['responseName'] == '2.16.840.1.113719.1.27.103.4': # Novell LDAP transaction error unsolicited notification
+ return TRANSACTION_ERROR
+ else:
+ self.connection.last_error = 'unknown unsolicited notification from server'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPSocketReceiveError(self.connection.last_error)
+ elif int(ldap_resp['messageID']) != message_id and dict_response['type'] == 'extendedResp':
+ self.connection.last_error = 'multiple extended responses to a single extended request'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPExtensionError(self.connection.last_error)
+ # pass # ignore message with invalid messageId when receiving multiple extendedResp. This is not allowed by RFC4511 but some LDAP server do it
+ else:
+ self.connection.last_error = 'invalid messageId received'
+ if log_enabled(ERROR):
+ log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ raise LDAPSocketReceiveError(self.connection.last_error)
+ # response = unprocessed
+ # if response: # if this statement is removed unprocessed data will be processed as another message
+ # self.connection.last_error = 'unprocessed substrate error'
+ # if log_enabled(ERROR):
+ # log(ERROR, '<%s> for <%s>', self.connection.last_error, self.connection)
+ # raise LDAPSocketReceiveError(self.connection.last_error)
+ else:
+ return SESSION_TERMINATED_BY_SERVER
+ ldap_responses.append(RESPONSE_COMPLETE)
+
+ return ldap_responses
+
+ def set_stream(self, value):
+ raise NotImplementedError
+
+ def get_stream(self):
+ raise NotImplementedError
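For comparison with the pooled strategy, the synchronous strategy above is blocking: each operation returns a boolean and the decoded messages are stored on the connection, as described in its docstring. A minimal sketch, with the server name, credentials and base DN as assumptions:

from ldap3 import Server, Connection, SYNC

server = Server('ldap.example.com')                      # hypothetical server
conn = Connection(server,
                  user='cn=admin,dc=example,dc=com',
                  password='secret',
                  client_strategy=SYNC,                  # SyncStrategy, the library default
                  auto_bind=True)
if conn.search('dc=example,dc=com', '(objectClass=person)', attributes=['cn']):
    for entry in conn.response:                          # entries collected by post_send_search()
        print(entry['dn'], entry['attributes']['cn'])
print(conn.result['description'])                        # e.g. 'success'
conn.unbind()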
diff --git a/ldap3/utils/asn1.py b/ldap3/utils/asn1.py
index 6b0b0bb..1b6091d 100644
--- a/ldap3/utils/asn1.py
+++ b/ldap3/utils/asn1.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/utils/ciDict.py b/ldap3/utils/ciDict.py
index 25fcd4c..c51d7ff 100644
--- a/ldap3/utils/ciDict.py
+++ b/ldap3/utils/ciDict.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -23,11 +23,15 @@
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
-import collections
+try:
+ from collections.abc import MutableMapping, Mapping
+except ImportError:
+ from collections import MutableMapping, Mapping
+
from .. import SEQUENCE_TYPES
-class CaseInsensitiveDict(collections.MutableMapping):
+class CaseInsensitiveDict(MutableMapping):
def __init__(self, other=None, **kwargs):
self._store = dict() # store use the original key
self._case_insensitive_keymap = dict() # is a mapping ci_key -> key
@@ -85,7 +89,7 @@ class CaseInsensitiveDict(collections.MutableMapping):
return self._store.items()
def __eq__(self, other):
- if not isinstance(other, (collections.Mapping, dict)):
+ if not isinstance(other, (Mapping, dict)):
return NotImplemented
if isinstance(other, CaseInsensitiveDict):
@@ -139,7 +143,7 @@ class CaseInsensitiveWithAliasDict(CaseInsensitiveDict):
if ci_key in self._aliases:
self.remove_alias(ci_key)
- def set_alias(self, key, alias):
+ def set_alias(self, key, alias, ignore_duplicates=False):
if not isinstance(alias, SEQUENCE_TYPES):
alias = [alias]
for alias_to_add in alias:
@@ -149,23 +153,28 @@ class CaseInsensitiveWithAliasDict(CaseInsensitiveDict):
if ci_alias not in self._case_insensitive_keymap: # checks if alias is used as a key
if ci_alias not in self._aliases: # checks if alias is used as another alias
self._aliases[ci_alias] = ci_key
- if ci_key in self._alias_keymap: # extend alias keymap
+ if ci_key in self._alias_keymap: # extends alias keymap
self._alias_keymap[ci_key].append(self._ci_key(ci_alias))
else:
self._alias_keymap[ci_key] = list()
self._alias_keymap[ci_key].append(self._ci_key(ci_alias))
else:
- if ci_key == self._ci_key(self._alias_keymap[ci_alias]): # passes if alias is already defined to the same key
+ if ci_key in self._alias_keymap and ci_alias in self._alias_keymap[ci_key]: # passes if alias is already defined to the same key
pass
- else:
+ elif not ignore_duplicates:
raise KeyError('\'' + str(alias_to_add) + '\' already used as alias')
else:
if ci_key == self._ci_key(self._case_insensitive_keymap[ci_alias]): # passes if alias is already defined to the same key
pass
- else:
+ elif not ignore_duplicates:
raise KeyError('\'' + str(alias_to_add) + '\' already used as key')
else:
- raise KeyError('\'' + str(ci_key) + '\' is not an existing key')
+ for keymap in self._alias_keymap:
+ if ci_key in self._alias_keymap[keymap]: # key is already aliased
+ self.set_alias(keymap, alias + [ci_key], ignore_duplicates=ignore_duplicates)
+ break
+ else:
+ raise KeyError('\'' + str(ci_key) + '\' is not an existing alias or key')
def remove_alias(self, alias):
if not isinstance(alias, SEQUENCE_TYPES):
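The set_alias() change above relaxes duplicate handling and lets an alias be defined through another alias. A small sketch of the intended behaviour; the keys and aliases below are made up, and the lookup semantics are assumed from the surrounding code:

from ldap3.utils.ciDict import CaseInsensitiveWithAliasDict

d = CaseInsensitiveWithAliasDict()
d['givenName'] = 'Giovanni'
d.set_alias('givenName', 'gn')                          # lookup is case insensitive: d['GN'] == 'Giovanni'
d.set_alias('gn', 'firstName', ignore_duplicates=True)  # aliasing an existing alias now resolves to its key
print(d['FIRSTNAME'])                                   # 'Giovanni'

d['sn'] = 'Cannata'
d.set_alias('sn', 'gn', ignore_duplicates=True)         # silently ignored; without the flag this raises KeyError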
diff --git a/ldap3/utils/config.py b/ldap3/utils/config.py
index 3da1c94..e3edbf8 100644
--- a/ldap3/utils/config.py
+++ b/ldap3/utils/config.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -88,6 +88,7 @@ _REUSABLE_THREADED_LIFETIME = 3600 # 1 hour
_DEFAULT_THREADED_POOL_NAME = 'REUSABLE_DEFAULT_POOL'
_ADDRESS_INFO_REFRESH_TIME = 300 # seconds to wait before refreshing address info from dns
_ADDITIONAL_SERVER_ENCODINGS = ['latin-1', 'koi8-r'] # some broken LDAP implementations may use encodings different from those expected by the RFCs
+_ADDITIONAL_CLIENT_ENCODINGS = ['utf-8']
_IGNORE_MALFORMED_SCHEMA = False # some flaky LDAP servers return malformed schema. If True no exception is raised and the schema is thrown away
_DEFAULT_SERVER_ENCODING = 'utf-8' # should always be utf-8
@@ -98,6 +99,34 @@ elif getdefaultencoding():
else:
_DEFAULT_CLIENT_ENCODING = 'utf-8'
+PARAMETERS = ['CASE_INSENSITIVE_ATTRIBUTE_NAMES',
+ 'CASE_INSENSITIVE_SCHEMA_NAMES',
+ 'ABSTRACTION_OPERATIONAL_ATTRIBUTE_PREFIX',
+ 'POOLING_LOOP_TIMEOUT',
+ 'RESPONSE_SLEEPTIME',
+ 'RESPONSE_WAITING_TIMEOUT',
+ 'SOCKET_SIZE',
+ 'CHECK_AVAILABILITY_TIMEOUT',
+ 'RESTARTABLE_SLEEPTIME',
+ 'RESTARTABLE_TRIES',
+ 'REUSABLE_THREADED_POOL_SIZE',
+ 'REUSABLE_THREADED_LIFETIME',
+ 'DEFAULT_THREADED_POOL_NAME',
+ 'ADDRESS_INFO_REFRESH_TIME',
+ 'RESET_AVAILABILITY_TIMEOUT',
+ 'DEFAULT_CLIENT_ENCODING',
+ 'DEFAULT_SERVER_ENCODING',
+ 'CLASSES_EXCLUDED_FROM_CHECK',
+ 'ATTRIBUTES_EXCLUDED_FROM_CHECK',
+ 'UTF8_ENCODED_SYNTAXES',
+ 'UTF8_ENCODED_TYPES',
+ 'ADDITIONAL_SERVER_ENCODINGS',
+ 'ADDITIONAL_CLIENT_ENCODINGS',
+ 'IGNORE_MALFORMED_SCHEMA',
+ 'ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF',
+ 'IGNORED_MANDATORY_ATTRIBUTES_IN_OBJECT_DEF'
+ ]
+
def get_config_parameter(parameter):
if parameter == 'CASE_INSENSITIVE_ATTRIBUTE_NAMES': # Boolean
@@ -130,7 +159,7 @@ def get_config_parameter(parameter):
return _ADDRESS_INFO_REFRESH_TIME
elif parameter == 'RESET_AVAILABILITY_TIMEOUT': # Integer
return _RESET_AVAILABILITY_TIMEOUT
- elif parameter in ['DEFAULT_CLIENT_ENCODING', 'DEFAULT_ENCODING']: # String
+ elif parameter in ['DEFAULT_CLIENT_ENCODING', 'DEFAULT_ENCODING']: # String - DEFAULT_ENCODING for backward compatibility
return _DEFAULT_CLIENT_ENCODING
elif parameter == 'DEFAULT_SERVER_ENCODING': # String
return _DEFAULT_SERVER_ENCODING
@@ -154,11 +183,16 @@ def get_config_parameter(parameter):
return _UTF8_ENCODED_TYPES
else:
return [_UTF8_ENCODED_TYPES]
- elif parameter in ['ADDITIONAL_SERVER_ENCODINGS', 'ADDITIONAL_ENCODINGS']: # Sequence
+ elif parameter in ['ADDITIONAL_SERVER_ENCODINGS', 'ADDITIONAL_ENCODINGS']: # Sequence - ADDITIONAL_ENCODINGS for backward compatibility
if isinstance(_ADDITIONAL_SERVER_ENCODINGS, SEQUENCE_TYPES):
return _ADDITIONAL_SERVER_ENCODINGS
else:
return [_ADDITIONAL_SERVER_ENCODINGS]
+ elif parameter in ['ADDITIONAL_CLIENT_ENCODINGS']: # Sequence
+ if isinstance(_ADDITIONAL_CLIENT_ENCODINGS, SEQUENCE_TYPES):
+ return _ADDITIONAL_CLIENT_ENCODINGS
+ else:
+ return [_ADDITIONAL_CLIENT_ENCODINGS]
elif parameter == 'IGNORE_MALFORMED_SCHEMA': # Boolean
return _IGNORE_MALFORMED_SCHEMA
elif parameter == 'ATTRIBUTES_EXCLUDED_FROM_OBJECT_DEF': # Sequence
@@ -242,6 +276,9 @@ def set_config_parameter(parameter, value):
elif parameter in ['ADDITIONAL_SERVER_ENCODINGS', 'ADDITIONAL_ENCODINGS']:
global _ADDITIONAL_SERVER_ENCODINGS
_ADDITIONAL_SERVER_ENCODINGS = value if isinstance(value, SEQUENCE_TYPES) else [value]
+ elif parameter in ['ADDITIONAL_CLIENT_ENCODINGS']:
+ global _ADDITIONAL_CLIENT_ENCODINGS
+ _ADDITIONAL_CLIENT_ENCODINGS = value if isinstance(value, SEQUENCE_TYPES) else [value]
elif parameter == 'IGNORE_MALFORMED_SCHEMA':
global _IGNORE_MALFORMED_SCHEMA
_IGNORE_MALFORMED_SCHEMA = value
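The new ADDITIONAL_CLIENT_ENCODINGS parameter added above is reachable through the same get/set helpers defined in this module. A short sketch; the extra encoding is only an example value:

from ldap3.utils.config import get_config_parameter, set_config_parameter

print(get_config_parameter('ADDITIONAL_CLIENT_ENCODINGS'))            # ['utf-8'] by default
set_config_parameter('ADDITIONAL_CLIENT_ENCODINGS', ['utf-8', 'latin-1'])
set_config_parameter('ADDITIONAL_CLIENT_ENCODINGS', 'latin-1')        # a single string is normalized to a list
print(get_config_parameter('ADDITIONAL_CLIENT_ENCODINGS'))            # ['latin-1']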
diff --git a/ldap3/utils/conv.py b/ldap3/utils/conv.py
index 0e11b4d..b000e30 100644
--- a/ldap3/utils/conv.py
+++ b/ldap3/utils/conv.py
@@ -1,224 +1,270 @@
-"""
-"""
-
-# Created on 2014.04.26
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from base64 import b64encode, b64decode
-import datetime
-import re
-
-from .. import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, get_config_parameter
-from ..utils.ciDict import CaseInsensitiveDict
-from ..core.exceptions import LDAPDefinitionError
-
-
-def to_unicode(obj, encoding=None, from_server=False):
- """Try to convert bytes (and str in python2) to unicode.
- Return object unmodified if python3 string, else raise an exception
- """
- conf_default_client_encoding = get_config_parameter('DEFAULT_CLIENT_ENCODING')
- conf_default_server_encoding = get_config_parameter('DEFAULT_SERVER_ENCODING')
- conf_additional_server_encodings = get_config_parameter('ADDITIONAL_SERVER_ENCODINGS')
- if isinstance(obj, NUMERIC_TYPES):
- obj = str(obj)
-
- if isinstance(obj, (bytes, bytearray)):
- if from_server: # data from server
- if encoding is None:
- encoding = conf_default_server_encoding
- try:
- return obj.decode(encoding)
- except UnicodeDecodeError:
- for encoding in conf_additional_server_encodings: # AD could have DN not encoded in utf-8 (even if this is not allowed by RFC4510)
- try:
- return obj.decode(encoding)
- except UnicodeDecodeError:
- pass
- raise UnicodeError("Unable to convert server data to unicode: %r" % obj)
- else: # data from client
- if encoding is None:
- encoding = conf_default_client_encoding
- try:
- return obj.decode(encoding)
- except UnicodeDecodeError:
- raise UnicodeError("Unable to convert client data to unicode: %r" % obj)
-
- if isinstance(obj, STRING_TYPES): # python3 strings, python 2 unicode
- return obj
-
- raise UnicodeError("Unable to convert type %s to unicode: %r" % (type(obj).__class__.__name__, obj))
-
-def to_raw(obj, encoding='utf-8'):
- """Tries to convert to raw bytes from unicode"""
- if isinstance(obj, NUMERIC_TYPES):
- obj = str(obj)
-
- if not (isinstance(obj, bytes)):
- if isinstance(obj, SEQUENCE_TYPES):
- return [to_raw(element) for element in obj]
- elif isinstance(obj, STRING_TYPES):
- return obj.encode(encoding)
-
- return obj
-
-
-def escape_filter_chars(text, encoding=None):
- """ Escape chars mentioned in RFC4515. """
-
- if encoding is None:
- encoding = get_config_parameter('DEFAULT_ENCODING')
-
- text = to_unicode(text, encoding)
- escaped = text.replace('\\', '\\5c')
- escaped = escaped.replace('*', '\\2a')
- escaped = escaped.replace('(', '\\28')
- escaped = escaped.replace(')', '\\29')
- escaped = escaped.replace('\x00', '\\00')
- # escape all octets greater than 0x7F that are not part of a valid UTF-8
- # escaped = ''.join(c if c <= '\x7f' else escape_bytes(to_raw(to_unicode(c, encoding))) for c in output)
- return escaped
-
-
-def escape_bytes(bytes_value):
- """ Convert a byte sequence to a properly escaped for LDAP (format BACKSLASH HEX HEX) string"""
- if bytes_value:
- if str is not bytes: # Python 3
- if isinstance(bytes_value, str):
- bytes_value = bytearray(bytes_value, encoding='utf-8')
- escaped = '\\'.join([('%02x' % int(b)) for b in bytes_value])
- else: # Python 2
- if isinstance(bytes_value, unicode):
- bytes_value = bytes_value.encode('utf-8')
- escaped = '\\'.join([('%02x' % ord(b)) for b in bytes_value])
- else:
- escaped = ''
-
- return ('\\' + escaped) if escaped else ''
-
-
-def prepare_for_stream(value):
- if str is not bytes: # Python 3
- return value
- else: # Python 2
- return value.decode()
-
-
-# def check_escape(raw_string):
-# if isinstance(raw_string, bytes) or '\\' not in raw_string:
-# return raw_string
-#
-# escaped = ''
-# i = 0
-# while i < len(raw_string):
-# if raw_string[i] == '\\' and i < len(raw_string) - 2:
-# try:
-# value = int(raw_string[i + 1: i + 3], 16)
-# escaped += chr(value)
-# i += 2
-# except ValueError:
-# escaped += '\\\\'
-# else:
-# escaped += raw_string[i]
-# i += 1
-#
-# return escaped
-
-
-def json_encode_b64(obj):
- try:
- return dict(encoding='base64', encoded=b64encode(obj))
- except Exception as e:
- raise LDAPDefinitionError('unable to encode ' + str(obj) + ' - ' + str(e))
-
-
-# noinspection PyProtectedMember
-def check_json_dict(json_dict):
- # needed for python 2
-
- for k, v in json_dict.items():
- if isinstance(v, dict):
- check_json_dict(v)
- elif isinstance(v, CaseInsensitiveDict):
- check_json_dict(v._store)
- elif isinstance(v, SEQUENCE_TYPES):
- for i, e in enumerate(v):
- if isinstance(e, dict):
- check_json_dict(e)
- elif isinstance(e, CaseInsensitiveDict):
- check_json_dict(e._store)
- else:
- v[i] = format_json(e)
- else:
- json_dict[k] = format_json(v)
-
-
-def json_hook(obj):
- if hasattr(obj, 'keys') and len(list(obj.keys())) == 2 and 'encoding' in obj.keys() and 'encoded' in obj.keys():
- return b64decode(obj['encoded'])
-
- return obj
-
-
-# noinspection PyProtectedMember
-def format_json(obj):
- if isinstance(obj, CaseInsensitiveDict):
- return obj._store
-
- if isinstance(obj, datetime.datetime):
- return str(obj)
-
- if isinstance(obj, int):
- return obj
-
- if str is bytes: # Python 2
- if isinstance(obj, long): # long exists only in python2
- return obj
-
- try:
- if str is not bytes: # Python 3
- if isinstance(obj, bytes):
- # return check_escape(str(obj, 'utf-8', errors='strict'))
- return str(obj, 'utf-8', errors='strict')
- raise LDAPDefinitionError('unable to serialize ' + str(obj))
- else: # Python 2
- if isinstance(obj, unicode):
- return obj
- else:
- # return unicode(check_escape(obj))
- return unicode(obj)
- except (TypeError, UnicodeDecodeError):
- pass
-
- try:
- return json_encode_b64(bytes(obj))
- except Exception:
- pass
-
- raise LDAPDefinitionError('unable to serialize ' + str(obj))
-
-
-def is_filter_escaped(text):
- if not type(text) == ((str is not bytes) and str or unicode): # requires str for Python 3 and unicode for Python 2
- raise ValueError('unicode input expected')
-
- return all(c not in text for c in '()*\0') and not re.search('\\\\([^0-9a-fA-F]|(.[^0-9a-fA-F]))', text)
+"""
+"""
+
+# Created on 2014.04.26
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from base64 import b64encode, b64decode
+import datetime
+import re
+
+from .. import SEQUENCE_TYPES, STRING_TYPES, NUMERIC_TYPES, get_config_parameter
+from ..utils.ciDict import CaseInsensitiveDict
+from ..core.exceptions import LDAPDefinitionError
+
+
+def to_unicode(obj, encoding=None, from_server=False):
+ """Try to convert bytes (and str in python2) to unicode.
+ Return object unmodified if python3 string, else raise an exception
+ """
+ conf_default_client_encoding = get_config_parameter('DEFAULT_CLIENT_ENCODING')
+ conf_default_server_encoding = get_config_parameter('DEFAULT_SERVER_ENCODING')
+ conf_additional_server_encodings = get_config_parameter('ADDITIONAL_SERVER_ENCODINGS')
+ conf_additional_client_encodings = get_config_parameter('ADDITIONAL_CLIENT_ENCODINGS')
+ if isinstance(obj, NUMERIC_TYPES):
+ obj = str(obj)
+
+ if isinstance(obj, (bytes, bytearray)):
+ if from_server: # data from server
+ if encoding is None:
+ encoding = conf_default_server_encoding
+ try:
+ return obj.decode(encoding)
+ except UnicodeDecodeError:
+ for encoding in conf_additional_server_encodings: # AD could have DN not encoded in utf-8 (even if this is not allowed by RFC4510)
+ try:
+ return obj.decode(encoding)
+ except UnicodeDecodeError:
+ pass
+ raise UnicodeError("Unable to convert server data to unicode: %r" % obj)
+ else: # data from client
+ if encoding is None:
+ encoding = conf_default_client_encoding
+ try:
+ return obj.decode(encoding)
+ except UnicodeDecodeError:
+ for encoding in conf_additional_client_encodings: # tries additional encodings
+ try:
+ return obj.decode(encoding)
+ except UnicodeDecodeError:
+ pass
+ raise UnicodeError("Unable to convert client data to unicode: %r" % obj)
+
+ if isinstance(obj, STRING_TYPES): # python3 strings, python 2 unicode
+ return obj
+
+ raise UnicodeError("Unable to convert type %s to unicode: %r" % (obj.__class__.__name__, obj))
+
+
+def to_raw(obj, encoding='utf-8'):
+ """Tries to convert to raw bytes from unicode"""
+ if isinstance(obj, NUMERIC_TYPES):
+ obj = str(obj)
+
+ if not (isinstance(obj, bytes)):
+ if isinstance(obj, SEQUENCE_TYPES):
+ return [to_raw(element) for element in obj]
+ elif isinstance(obj, STRING_TYPES):
+ return obj.encode(encoding)
+ return obj
+
+
+def escape_filter_chars(text, encoding=None):
+ """ Escape chars mentioned in RFC4515. """
+ if encoding is None:
+ encoding = get_config_parameter('DEFAULT_ENCODING')
+
+ try:
+ text = to_unicode(text, encoding)
+ escaped = text.replace('\\', '\\5c')
+ escaped = escaped.replace('*', '\\2a')
+ escaped = escaped.replace('(', '\\28')
+ escaped = escaped.replace(')', '\\29')
+ escaped = escaped.replace('\x00', '\\00')
+ except Exception: # probably raw bytes values, return escaped bytes value
+ escaped = to_unicode(escape_bytes(text))
+ # escape all octets greater than 0x7F that are not part of a valid UTF-8
+ # escaped = ''.join(c if c <= ord(b'\x7f') else escape_bytes(to_raw(to_unicode(c, encoding))) for c in escaped)
+ return escaped
+
+
+def unescape_filter_chars(text, encoding=None):
+ """ unescape chars mentioned in RFC4515. """
+ if encoding is None:
+ encoding = get_config_parameter('DEFAULT_ENCODING')
+
+ unescaped = to_raw(text, encoding)
+ unescaped = unescaped.replace(b'\\5c', b'\\')
+ unescaped = unescaped.replace(b'\\5C', b'\\')
+ unescaped = unescaped.replace(b'\\2a', b'*')
+ unescaped = unescaped.replace(b'\\2A', b'*')
+ unescaped = unescaped.replace(b'\\28', b'(')
+ unescaped = unescaped.replace(b'\\29', b')')
+ unescaped = unescaped.replace(b'\\00', b'\x00')
+ return unescaped
+
+
+def escape_bytes(bytes_value):
+ """ Convert a byte sequence to a properly escaped for LDAP (format BACKSLASH HEX HEX) string"""
+ if bytes_value:
+ if str is not bytes: # Python 3
+ if isinstance(bytes_value, str):
+ bytes_value = bytearray(bytes_value, encoding='utf-8')
+ escaped = '\\'.join([('%02x' % int(b)) for b in bytes_value])
+ else: # Python 2
+ if isinstance(bytes_value, unicode):
+ bytes_value = bytes_value.encode('utf-8')
+ escaped = '\\'.join([('%02x' % ord(b)) for b in bytes_value])
+ else:
+ escaped = ''
+
+ return ('\\' + escaped) if escaped else ''
+
+
+def prepare_for_stream(value):
+ if str is not bytes: # Python 3
+ return value
+ else: # Python 2
+ return value.decode()
+
+
+def json_encode_b64(obj):
+ try:
+ return dict(encoding='base64', encoded=b64encode(obj))
+ except Exception as e:
+ raise LDAPDefinitionError('unable to encode ' + str(obj) + ' - ' + str(e))
+
+
+# noinspection PyProtectedMember
+def check_json_dict(json_dict):
+ # needed for python 2
+
+ for k, v in json_dict.items():
+ if isinstance(v, dict):
+ check_json_dict(v)
+ elif isinstance(v, CaseInsensitiveDict):
+ check_json_dict(v._store)
+ elif isinstance(v, SEQUENCE_TYPES):
+ for i, e in enumerate(v):
+ if isinstance(e, dict):
+ check_json_dict(e)
+ elif isinstance(e, CaseInsensitiveDict):
+ check_json_dict(e._store)
+ else:
+ v[i] = format_json(e)
+ else:
+ json_dict[k] = format_json(v)
+
+
+def json_hook(obj):
+ if hasattr(obj, 'keys') and len(list(obj.keys())) == 2 and 'encoding' in obj.keys() and 'encoded' in obj.keys():
+ return b64decode(obj['encoded'])
+
+ return obj
+
+
+# noinspection PyProtectedMember
+def format_json(obj):
+ if isinstance(obj, CaseInsensitiveDict):
+ return obj._store
+
+ if isinstance(obj, datetime.datetime):
+ return str(obj)
+
+ if isinstance(obj, int):
+ return obj
+
+ if isinstance(obj, datetime.timedelta):
+ return str(obj)
+
+ if str is bytes: # Python 2
+ if isinstance(obj, long): # long exists only in python2
+ return obj
+
+ try:
+ if str is not bytes: # Python 3
+ if isinstance(obj, bytes):
+ # return check_escape(str(obj, 'utf-8', errors='strict'))
+ return str(obj, 'utf-8', errors='strict')
+ raise LDAPDefinitionError('unable to serialize ' + str(obj))
+ else: # Python 2
+ if isinstance(obj, unicode):
+ return obj
+ else:
+ # return unicode(check_escape(obj))
+ return unicode(obj)
+ except (TypeError, UnicodeDecodeError):
+ pass
+
+ try:
+ return json_encode_b64(bytes(obj))
+ except Exception:
+ pass
+
+ raise LDAPDefinitionError('unable to serialize ' + str(obj))
+
+
+def is_filter_escaped(text):
+ if not type(text) == ((str is not bytes) and str or unicode): # requires str for Python 3 and unicode for Python 2
+ raise ValueError('unicode input expected')
+
+ return all(c not in text for c in '()*\0') and not re.search('\\\\([^0-9a-fA-F]|(.[^0-9a-fA-F]))', text)
+
+
+def ldap_escape_to_bytes(text):
+ bytesequence = bytearray()
+ i = 0
+ try:
+ if isinstance(text, STRING_TYPES):
+ while i < len(text):
+ if text[i] == '\\':
+ if len(text) > i + 2:
+ try:
+ bytesequence.append(int(text[i+1:i+3], 16))
+ i += 3
+ continue
+ except ValueError:
+ pass
+ bytesequence.append(92) # "\" ASCII code
+ else:
+ raw = to_raw(text[i])
+ for c in raw:
+ bytesequence.append(c)
+ i += 1
+ elif isinstance(text, (bytes, bytearray)):
+ while i < len(text):
+ if text[i] == 92: # "\" ASCII code
+ if len(text) > i + 2:
+ try:
+ bytesequence.append(int(text[i + 1:i + 3], 16))
+ i += 3
+ continue
+ except ValueError:
+ pass
+ bytesequence.append(92) # "\" ASCII code
+ else:
+ bytesequence.append(text[i])
+ i += 1
+ except Exception:
+ raise LDAPDefinitionError('badly formatted LDAP byte escaped sequence')
+
+ return bytes(bytesequence)
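
The following standalone sketch (not part of the commit) exercises the escaping helpers defined above; the literal values are examples only.

    from ldap3.utils.conv import (escape_filter_chars, unescape_filter_chars,
                                  ldap_escape_to_bytes)

    # '*', '(', ')' and NUL are replaced by their \XX escapes (RFC 4515)
    assert escape_filter_chars('cn=*admin*') == 'cn=\\2aadmin\\2a'

    # unescape_filter_chars works on raw bytes and reverses the escapes
    assert unescape_filter_chars('cn=\\2aadmin\\2a') == b'cn=*admin*'

    # ldap_escape_to_bytes turns a BACKSLASH HEX HEX sequence into raw bytes
    assert ldap_escape_to_bytes('\\01\\00\\ff') == b'\x01\x00\xff'
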
diff --git a/ldap3/utils/dn.py b/ldap3/utils/dn.py
index de9dd81..c2a1e66 100644
--- a/ldap3/utils/dn.py
+++ b/ldap3/utils/dn.py
@@ -1,357 +1,405 @@
-"""
-"""
-
-# Created on 2014.09.08
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-from string import hexdigits, ascii_letters, digits
-
-from .. import SEQUENCE_TYPES
-from ..core.exceptions import LDAPInvalidDnError
-
-
-STATE_ANY = 0
-STATE_ESCAPE = 1
-STATE_ESCAPE_HEX = 2
-
-
-def _add_ava(ava, decompose, remove_space, space_around_equal):
- if not ava:
- return ''
-
- space = ' ' if space_around_equal else ''
- attr_name, _, value = ava.partition('=')
- if decompose:
- if remove_space:
- component = (attr_name.strip(), value.strip())
- else:
- component = (attr_name, value)
- else:
- if remove_space:
- component = attr_name.strip() + space + '=' + space + value.strip()
- else:
- component = attr_name + space + '=' + space + value
-
- return component
-
-
-def to_dn(iterator, decompose=False, remove_space=False, space_around_equal=False, separate_rdn=False):
- """
- Convert an iterator to a list of dn parts
- if decompose=True return a list of tuple (one for each dn component) else return a list of strings
- if remove_space=True removes unneeded spaces
- if space_around_equal=True add spaces around equal in returned strings
- if separate_rdn=True consider multiple RDNs as different component of DN
- """
- dn = []
- component = ''
- escape_sequence = False
- for c in iterator:
- if c == '\\': # escape sequence
- escape_sequence = True
- elif escape_sequence and c != ' ':
- escape_sequence = False
- elif c == '+' and separate_rdn:
- dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
- component = ''
- continue
- elif c == ',':
- if '=' in component:
- dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
- component = ''
- continue
-
- component += c
-
- dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
- return dn
-
-
-def _find_first_unescaped(dn, char, pos):
- while True:
- pos = dn.find(char, pos)
- if pos == -1:
- break # no char found
- if pos > 0 and dn[pos - 1] != '\\': # unescaped char
- break
-
- pos += 1
-
- return pos
-
-
-def _find_last_unescaped(dn, char, start, stop=0):
- while True:
- stop = dn.rfind(char, start, stop)
- if stop == -1:
- break
- if stop >= 0 and dn[stop - 1] != '\\':
- break
-
- if stop < start:
- stop = -1
- break
-
- return stop
-
-
-def _get_next_ava(dn):
- comma = _find_first_unescaped(dn, ',', 0)
- plus = _find_first_unescaped(dn, '+', 0)
-
- if plus > 0 and (plus < comma or comma == -1):
- equal = _find_first_unescaped(dn, '=', plus + 1)
- if equal > plus + 1:
- plus = _find_last_unescaped(dn, '+', plus, equal)
- return dn[:plus], '+'
-
- if comma > 0:
- equal = _find_first_unescaped(dn, '=', comma + 1)
- if equal > comma + 1:
- comma = _find_last_unescaped(dn, ',', comma, equal)
- return dn[:comma], ','
-
- return dn, ''
-
-
-def _split_ava(ava, escape=False, strip=True):
- equal = ava.find('=')
- while equal > 0: # not first character
- if ava[equal - 1] != '\\': # not an escaped equal so it must be an ava separator
- # attribute_type1 = ava[0:equal].strip() if strip else ava[0:equal]
- if strip:
- attribute_type = ava[0:equal].strip()
- attribute_value = _escape_attribute_value(ava[equal + 1:].strip()) if escape else ava[equal + 1:].strip()
- else:
- attribute_type = ava[0:equal]
- attribute_value = _escape_attribute_value(ava[equal + 1:]) if escape else ava[equal + 1:]
-
- return attribute_type, attribute_value
- equal = ava.find('=', equal + 1)
-
- return '', (ava.strip if strip else ava) # if no equal found return only value
-
-
-def _validate_attribute_type(attribute_type):
- if not attribute_type:
- raise LDAPInvalidDnError('attribute type not present')
-
- if attribute_type == '<GUID': # patch for AD DirSync
- return True
-
- for c in attribute_type:
- if not (c in ascii_letters or c in digits or c == '-'): # allowed uppercase and lowercase letters, digits and hyphen as per RFC 4512
- raise LDAPInvalidDnError('character \'' + c + '\' not allowed in attribute type')
-
- if attribute_type[0] in digits or attribute_type[0] == '-': # digits and hyphen not allowed as first character
- raise LDAPInvalidDnError('character \'' + attribute_type[0] + '\' not allowed as first character of attribute type')
-
- return True
-
-
-def _validate_attribute_value(attribute_value):
- if not attribute_value:
- return False
-
- if attribute_value[0] == '#': # only hex characters are valid
- for c in attribute_value:
- if 'c' not in hexdigits: # allowed only hex digits as per RFC 4514
- raise LDAPInvalidDnError('character ' + c + ' not allowed in hex representation of attribute value')
- if len(attribute_value) % 2 == 0: # string must be # + HEX HEX (an odd number of chars)
- raise LDAPInvalidDnError('hex representation must be in the form of <HEX><HEX> pairs')
- if attribute_value[0] == ' ': # space cannot be used as first or last character
- raise LDAPInvalidDnError('SPACE not allowed as first character of attribute value')
- if attribute_value[-1] == ' ':
- raise LDAPInvalidDnError('SPACE not allowed as last character of attribute value')
-
- state = STATE_ANY
- for c in attribute_value:
- if state == STATE_ANY:
- if c == '\\':
- state = STATE_ESCAPE
- elif c in '"#+,;<=>\00':
- raise LDAPInvalidDnError('special characters ' + c + ' must be escaped')
- elif state == STATE_ESCAPE:
- if c in hexdigits:
- state = STATE_ESCAPE_HEX
- elif c in ' "#+,;<=>\\\00':
- state = STATE_ANY
- else:
- raise LDAPInvalidDnError('invalid escaped character ' + c)
- elif state == STATE_ESCAPE_HEX:
- if c in hexdigits:
- state = STATE_ANY
- else:
- raise LDAPInvalidDnError('invalid escaped character ' + c)
-
- # final state
- if state != STATE_ANY:
- raise LDAPInvalidDnError('invalid final character')
-
- return True
-
-
-def _escape_attribute_value(attribute_value):
- if not attribute_value:
- return ''
-
- if attribute_value[0] == '#': # with leading SHARP only pairs of hex characters are valid
- valid_hex = True
- if len(attribute_value) % 2 == 0: # string must be # + HEX HEX (an odd number of chars)
- valid_hex = False
-
- if valid_hex:
- for c in attribute_value:
- if c not in hexdigits: # allowed only hex digits as per RFC 4514
- valid_hex = False
- break
-
- if valid_hex:
- return attribute_value
-
- state = STATE_ANY
- escaped = ''
- tmp_buffer = ''
- for c in attribute_value:
- if state == STATE_ANY:
- if c == '\\':
- state = STATE_ESCAPE
- elif c in '"#+,;<=>\00':
- escaped += '\\' + c
- else:
- escaped += c
- elif state == STATE_ESCAPE:
- if c in hexdigits:
- tmp_buffer = c
- state = STATE_ESCAPE_HEX
- elif c in ' "#+,;<=>\\\00':
- escaped += '\\' + c
- state = STATE_ANY
- else:
- escaped += '\\\\' + c
- elif state == STATE_ESCAPE_HEX:
- if c in hexdigits:
- escaped += '\\' + tmp_buffer + c
- else:
- escaped += '\\\\' + tmp_buffer + c
- tmp_buffer = ''
- state = STATE_ANY
-
- # final state
- if state == STATE_ESCAPE:
- escaped += '\\\\'
- elif state == STATE_ESCAPE_HEX:
- escaped += '\\\\' + tmp_buffer
-
- if escaped[0] == ' ': # leading SPACE must be escaped
- escaped = '\\' + escaped
-
- if escaped[-1] == ' ' and len(escaped) > 1 and escaped[-2] != '\\': # trailing SPACE must be escaped
- escaped = escaped[:-1] + '\\ '
-
- return escaped
-
-
-def parse_dn(dn, escape=False, strip=True):
- rdns = []
- avas = []
- while dn:
- ava, separator = _get_next_ava(dn) # if returned ava doesn't containg any unescaped equal it'a appended to last ava in avas
-
- dn = dn[len(ava) + 1:]
- if _find_first_unescaped(ava, '=', 0) > 0 or len(avas) == 0:
- avas.append((ava, separator))
- else:
- avas[len(avas) - 1] = (avas[len(avas) - 1][0] + avas[len(avas) - 1][1] + ava, separator)
-
- for ava, separator in avas:
- attribute_type, attribute_value = _split_ava(ava, escape, strip)
-
- if not _validate_attribute_type(attribute_type):
- raise LDAPInvalidDnError('unable to validate attribute type in ' + ava)
-
- if not _validate_attribute_value(attribute_value):
- raise LDAPInvalidDnError('unable to validate attribute value in ' + ava)
-
- rdns.append((attribute_type, attribute_value, separator))
- dn = dn[len(ava) + 1:]
-
- if not rdns:
- raise LDAPInvalidDnError('empty dn')
-
- return rdns
-
-
-def safe_dn(dn, decompose=False, reverse=False):
- """
- normalize and escape a dn, if dn is a sequence it is joined.
- the reverse parameter change the join direction of the sequence
- """
- if isinstance(dn, SEQUENCE_TYPES):
- components = [rdn for rdn in dn]
- if reverse:
- dn = ','.join(reversed(components))
- else:
- dn = ','.join(components)
- if decompose:
- escaped_dn = []
- else:
- escaped_dn = ''
-
- if dn.startswith('<GUID=') and dn.endswith('>'): # Active Directory allows looking up objects by putting its GUID in a specially-formatted DN (e.g. '<GUID=7b95f0d5-a3ed-486c-919c-077b8c9731f2>')
- escaped_dn = dn
- elif '@' not in dn and '\\' not in dn: # active directory UPN (User Principal Name) consist of an account, the at sign (@) and a domain, or the domain level logn name domain\username
- for component in parse_dn(dn, escape=True):
- if decompose:
- escaped_dn.append((component[0], component[1], component[2]))
- else:
- escaped_dn += component[0] + '=' + component[1] + component[2]
- elif '@' in dn and '=' not in dn and len(dn.split('@')) != 2:
- raise LDAPInvalidDnError('Active Directory User Principal Name must consist of name@domain')
- elif '\\' in dn and '=' not in dn and len(dn.split('\\')) != 2:
- raise LDAPInvalidDnError('Active Directory Domain Level Logon Name must consist of name\\domain')
- else:
- escaped_dn = dn
-
- return escaped_dn
-
-
-def safe_rdn(dn, decompose=False):
- """Returns a list of rdn for the dn, usually there is only one rdn, but it can be more than one when the + sign is used"""
- escaped_rdn = []
- one_more = True
- for component in parse_dn(dn, escape=True):
- if component[2] == '+' or one_more:
- if decompose:
- escaped_rdn.append((component[0], component[1]))
- else:
- escaped_rdn.append(component[0] + '=' + component[1])
- if component[2] == '+':
- one_more = True
- else:
- one_more = False
- break
-
- if one_more:
- raise LDAPInvalidDnError('bad dn ' + str(dn))
-
- return escaped_rdn
+"""
+"""
+
+# Created on 2014.09.08
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+from string import hexdigits, ascii_letters, digits
+
+from .. import SEQUENCE_TYPES
+from ..core.exceptions import LDAPInvalidDnError
+
+
+STATE_ANY = 0
+STATE_ESCAPE = 1
+STATE_ESCAPE_HEX = 2
+
+
+def _add_ava(ava, decompose, remove_space, space_around_equal):
+ if not ava:
+ return ''
+
+ space = ' ' if space_around_equal else ''
+ attr_name, _, value = ava.partition('=')
+ if decompose:
+ if remove_space:
+ component = (attr_name.strip(), value.strip())
+ else:
+ component = (attr_name, value)
+ else:
+ if remove_space:
+ component = attr_name.strip() + space + '=' + space + value.strip()
+ else:
+ component = attr_name + space + '=' + space + value
+
+ return component
+
+
+def to_dn(iterator, decompose=False, remove_space=False, space_around_equal=False, separate_rdn=False):
+ """
+ Convert an iterator to a list of dn parts
+ if decompose=True return a list of tuple (one for each dn component) else return a list of strings
+ if remove_space=True removes unneeded spaces
+ if space_around_equal=True add spaces around equal in returned strings
+ if separate_rdn=True consider multiple RDNs as different component of DN
+ """
+ dn = []
+ component = ''
+ escape_sequence = False
+ for c in iterator:
+ if c == '\\': # escape sequence
+ escape_sequence = True
+ elif escape_sequence and c != ' ':
+ escape_sequence = False
+ elif c == '+' and separate_rdn:
+ dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
+ component = ''
+ continue
+ elif c == ',':
+ if '=' in component:
+ dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
+ component = ''
+ continue
+
+ component += c
+
+ dn.append(_add_ava(component, decompose, remove_space, space_around_equal))
+ return dn
+
+
+def _find_first_unescaped(dn, char, pos):
+ while True:
+ pos = dn.find(char, pos)
+ if pos == -1:
+ break # no char found
+ if pos > 0 and dn[pos - 1] != '\\': # unescaped char
+ break
+ elif pos > 1 and dn[pos - 1] == '\\': # may be unescaped
+ escaped = True
+ for c in dn[pos - 2:0:-1]:
+ if c == '\\':
+ escaped = not escaped
+ else:
+ break
+ if not escaped:
+ break
+ pos += 1
+
+ return pos
+
+
+def _find_last_unescaped(dn, char, start, stop=0):
+ while True:
+ stop = dn.rfind(char, start, stop)
+ if stop == -1:
+ break
+ if stop >= 0 and dn[stop - 1] != '\\':
+ break
+ elif stop > 1 and dn[stop - 1] == '\\': # may be unescaped
+ escaped = True
+ for c in dn[stop - 2:0:-1]:
+ if c == '\\':
+ escaped = not escaped
+ else:
+ break
+ if not escaped:
+ break
+ if stop < start:
+ stop = -1
+ break
+
+ return stop
+
+
+def _get_next_ava(dn):
+ comma = _find_first_unescaped(dn, ',', 0)
+ plus = _find_first_unescaped(dn, '+', 0)
+
+ if plus > 0 and (plus < comma or comma == -1):
+ equal = _find_first_unescaped(dn, '=', plus + 1)
+ if equal > plus + 1:
+ plus = _find_last_unescaped(dn, '+', plus, equal)
+ return dn[:plus], '+'
+
+ if comma > 0:
+ equal = _find_first_unescaped(dn, '=', comma + 1)
+ if equal > comma + 1:
+ comma = _find_last_unescaped(dn, ',', comma, equal)
+ return dn[:comma], ','
+
+ return dn, ''
+
+
+def _split_ava(ava, escape=False, strip=True):
+ equal = ava.find('=')
+ while equal > 0: # not first character
+ if ava[equal - 1] != '\\': # not an escaped equal so it must be an ava separator
+ # attribute_type1 = ava[0:equal].strip() if strip else ava[0:equal]
+ if strip:
+ attribute_type = ava[0:equal].strip()
+ attribute_value = _escape_attribute_value(ava[equal + 1:].strip()) if escape else ava[equal + 1:].strip()
+ else:
+ attribute_type = ava[0:equal]
+ attribute_value = _escape_attribute_value(ava[equal + 1:]) if escape else ava[equal + 1:]
+
+ return attribute_type, attribute_value
+ equal = ava.find('=', equal + 1)
+
+ return '', (ava.strip() if strip else ava) # if no equal found return only value
+
+
+def _validate_attribute_type(attribute_type):
+ if not attribute_type:
+ raise LDAPInvalidDnError('attribute type not present')
+
+ if attribute_type == '<GUID': # patch for AD DirSync
+ return True
+
+ for c in attribute_type:
+ if not (c in ascii_letters or c in digits or c == '-'): # allowed uppercase and lowercase letters, digits and hyphen as per RFC 4512
+ raise LDAPInvalidDnError('character \'' + c + '\' not allowed in attribute type')
+
+ if attribute_type[0] in digits or attribute_type[0] == '-': # digits and hyphen not allowed as first character
+ raise LDAPInvalidDnError('character \'' + attribute_type[0] + '\' not allowed as first character of attribute type')
+
+ return True
+
+
+def _validate_attribute_value(attribute_value):
+ if not attribute_value:
+ return False
+
+ if attribute_value[0] == '#': # only hex characters are valid
+ for c in attribute_value:
+ if c not in hexdigits: # allowed only hex digits as per RFC 4514
+ raise LDAPInvalidDnError('character ' + c + ' not allowed in hex representation of attribute value')
+ if len(attribute_value) % 2 == 0: # string must be # + HEX HEX (an odd number of chars)
+ raise LDAPInvalidDnError('hex representation must be in the form of <HEX><HEX> pairs')
+ if attribute_value[0] == ' ': # unescaped space cannot be used as leading or last character
+ raise LDAPInvalidDnError('SPACE must be escaped as leading character of attribute value')
+ if attribute_value.endswith(' ') and not attribute_value.endswith('\\ '):
+ raise LDAPInvalidDnError('SPACE must be escaped as trailing character of attribute value')
+
+ state = STATE_ANY
+ for c in attribute_value:
+ if state == STATE_ANY:
+ if c == '\\':
+ state = STATE_ESCAPE
+ elif c in '"#+,;<=>\00':
+ raise LDAPInvalidDnError('special character ' + c + ' must be escaped')
+ elif state == STATE_ESCAPE:
+ if c in hexdigits:
+ state = STATE_ESCAPE_HEX
+ elif c in ' "#+,;<=>\\\00':
+ state = STATE_ANY
+ else:
+ raise LDAPInvalidDnError('invalid escaped character ' + c)
+ elif state == STATE_ESCAPE_HEX:
+ if c in hexdigits:
+ state = STATE_ANY
+ else:
+ raise LDAPInvalidDnError('invalid escaped character ' + c)
+
+ # final state
+ if state != STATE_ANY:
+ raise LDAPInvalidDnError('invalid final character')
+
+ return True
+
+
+def _escape_attribute_value(attribute_value):
+ if not attribute_value:
+ return ''
+
+ if attribute_value[0] == '#': # with leading SHARP only pairs of hex characters are valid
+ valid_hex = True
+ if len(attribute_value) % 2 == 0: # string must be # + HEX HEX (an odd number of chars)
+ valid_hex = False
+
+ if valid_hex:
+ for c in attribute_value:
+ if c not in hexdigits: # allowed only hex digits as per RFC 4514
+ valid_hex = False
+ break
+
+ if valid_hex:
+ return attribute_value
+
+ state = STATE_ANY
+ escaped = ''
+ tmp_buffer = ''
+ for c in attribute_value:
+ if state == STATE_ANY:
+ if c == '\\':
+ state = STATE_ESCAPE
+ elif c in '"#+,;<=>\00':
+ escaped += '\\' + c
+ else:
+ escaped += c
+ elif state == STATE_ESCAPE:
+ if c in hexdigits:
+ tmp_buffer = c
+ state = STATE_ESCAPE_HEX
+ elif c in ' "#+,;<=>\\\00':
+ escaped += '\\' + c
+ state = STATE_ANY
+ else:
+ escaped += '\\\\' + c
+ elif state == STATE_ESCAPE_HEX:
+ if c in hexdigits:
+ escaped += '\\' + tmp_buffer + c
+ else:
+ escaped += '\\\\' + tmp_buffer + c
+ tmp_buffer = ''
+ state = STATE_ANY
+
+ # final state
+ if state == STATE_ESCAPE:
+ escaped += '\\\\'
+ elif state == STATE_ESCAPE_HEX:
+ escaped += '\\\\' + tmp_buffer
+
+ if escaped[0] == ' ': # leading SPACE must be escaped
+ escaped = '\\' + escaped
+
+ if escaped[-1] == ' ' and len(escaped) > 1 and escaped[-2] != '\\': # trailing SPACE must be escaped
+ escaped = escaped[:-1] + '\\ '
+
+ return escaped
+
+
+def parse_dn(dn, escape=False, strip=False):
+ """
+ Parses a DN into syntactic components
+ :param dn:
+ :param escape:
+ :param strip:
+ :return:
+ a list of triples representing `attributeTypeAndValue` elements,
+ each containing the `attributeType`, the `attributeValue` (kept in its original
+ representation, still containing escapes or encoded as hex) and the following
+ separator (`COMMA` or `PLUS`) if present, else an empty `str`.
+ """
+ rdns = []
+ avas = []
+ while dn:
+ ava, separator = _get_next_ava(dn) # if the returned ava doesn't contain any unescaped equal it is appended to the last ava in avas
+
+ dn = dn[len(ava) + 1:]
+ if _find_first_unescaped(ava, '=', 0) > 0 or len(avas) == 0:
+ avas.append((ava, separator))
+ else:
+ avas[len(avas) - 1] = (avas[len(avas) - 1][0] + avas[len(avas) - 1][1] + ava, separator)
+
+ for ava, separator in avas:
+ attribute_type, attribute_value = _split_ava(ava, escape, strip)
+
+ if not _validate_attribute_type(attribute_type):
+ raise LDAPInvalidDnError('unable to validate attribute type in ' + ava)
+
+ if not _validate_attribute_value(attribute_value):
+ raise LDAPInvalidDnError('unable to validate attribute value in ' + ava)
+
+ rdns.append((attribute_type, attribute_value, separator))
+ dn = dn[len(ava) + 1:]
+
+ if not rdns:
+ raise LDAPInvalidDnError('empty dn')
+
+ return rdns
+
+
+def safe_dn(dn, decompose=False, reverse=False):
+ """
+ normalize and escape a dn, if dn is a sequence it is joined.
+ the reverse parameter changes the join direction of the sequence
+ """
+ if isinstance(dn, SEQUENCE_TYPES):
+ components = [rdn for rdn in dn]
+ if reverse:
+ dn = ','.join(reversed(components))
+ else:
+ dn = ','.join(components)
+ if decompose:
+ escaped_dn = []
+ else:
+ escaped_dn = ''
+
+ if dn.startswith('<GUID=') and dn.endswith('>'): # Active Directory allows looking up objects by putting its GUID in a specially-formatted DN (e.g. '<GUID=7b95f0d5-a3ed-486c-919c-077b8c9731f2>')
+ escaped_dn = dn
+ elif dn.startswith('<WKGUID=') and dn.endswith('>'): # Active Directory allows Binding to Well-Known Objects Using WKGUID in a specially-formatted DN (e.g. <WKGUID=a9d1ca15768811d1aded00c04fd8d5cd,dc=Fabrikam,dc=com>)
+ escaped_dn = dn
+ elif dn.startswith('<SID=') and dn.endswith('>'): # Active Directory allows looking up objects by putting its security identifier (SID) in a specially-formatted DN (e.g. '<SID=S-#-#-##-##########-##########-##########-######>')
+ escaped_dn = dn
+ elif '@' not in dn: # active directory UPN (User Principal Name) consists of an account, the at sign (@) and a domain, or the domain-level logon name domain\username
+ for component in parse_dn(dn, escape=True):
+ if decompose:
+ escaped_dn.append((component[0], component[1], component[2]))
+ else:
+ escaped_dn += component[0] + '=' + component[1] + component[2]
+ elif '@' in dn and '=' not in dn and len(dn.split('@')) != 2:
+ raise LDAPInvalidDnError('Active Directory User Principal Name must consist of name@domain')
+ elif '\\' in dn and '=' not in dn and len(dn.split('\\')) != 2:
+ raise LDAPInvalidDnError('Active Directory Domain Level Logon Name must consist of name\\domain')
+ else:
+ escaped_dn = dn
+
+ return escaped_dn
+
+
+def safe_rdn(dn, decompose=False):
+ """Returns a list of rdn for the dn, usually there is only one rdn, but it can be more than one when the + sign is used"""
+ escaped_rdn = []
+ one_more = True
+ for component in parse_dn(dn, escape=True):
+ if component[2] == '+' or one_more:
+ if decompose:
+ escaped_rdn.append((component[0], component[1]))
+ else:
+ escaped_rdn.append(component[0] + '=' + component[1])
+ if component[2] == '+':
+ one_more = True
+ else:
+ one_more = False
+ break
+
+ if one_more:
+ raise LDAPInvalidDnError('bad dn ' + str(dn))
+
+ return escaped_rdn
+
+
+def escape_rdn(rdn):
+ """
+ Escape rdn characters to prevent injection according to RFC 4514.
+ """
+
+ # '\\' must be handled first or the escaping backslashes would themselves be escaped!
+ for char in ['\\', ',', '+', '"', '<', '>', ';', '=', '\x00']:
+ rdn = rdn.replace(char, '\\' + char)
+
+ if rdn[0] == '#' or rdn[0] == ' ':
+ rdn = ''.join(('\\', rdn))
+
+ if rdn[-1] == ' ':
+ rdn = ''.join((rdn[:-1], '\\ '))
+
+ return rdn
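
An illustrative sketch (again not from the commit) of the DN helpers touched above; the SID value and names are placeholders.

    from ldap3.utils.dn import escape_rdn, safe_dn, safe_rdn

    # <GUID=...>, <WKGUID=...> and <SID=...> forms are now passed through untouched
    print(safe_dn('<SID=S-1-5-21-1004336348-1177238915-682003330-512>'))

    # escape_rdn() protects user-supplied text used to build an RDN (RFC 4514)
    print(escape_rdn('Smith, John'))               # Smith\, John
    print(safe_rdn('cn=admin,dc=example,dc=com'))  # ['cn=admin']
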
diff --git a/ldap3/utils/hashed.py b/ldap3/utils/hashed.py
index 33a2b89..e58d67d 100644
--- a/ldap3/utils/hashed.py
+++ b/ldap3/utils/hashed.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/utils/log.py b/ldap3/utils/log.py
index e55592e..228c745 100644
--- a/ldap3/utils/log.py
+++ b/ldap3/utils/log.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -180,8 +180,16 @@ def get_library_log_detail_level():
def format_ldap_message(message, prefix):
+ if isinstance(message, LDAPMessage):
+ try: # pyasn1 prettyprint raises exception in version 0.4.3
+ formatted = message.prettyPrint().split('\n') # pyasn1 pretty print
+ except Exception as e:
+ formatted = ['pyasn1 exception', str(e)]
+ else:
+ formatted = pformat(message).split('\n')
+
prefixed = ''
- for line in (message.prettyPrint().split('\n') if isinstance(message, LDAPMessage) else pformat(message).split('\n')): # uses pyasn1 LDAP message prettyPrint() method
+ for line in formatted:
if line:
if _hide_sensitive_data and line.strip().lower().startswith(_sensitive_lines): # _sensitive_lines is a tuple. startswith() method checks each tuple element
tag, _, data = line.partition('=')
diff --git a/ldap3/utils/ntlm.py b/ldap3/utils/ntlm.py
index 54efaae..f91776d 100644
--- a/ldap3/utils/ntlm.py
+++ b/ldap3/utils/ntlm.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -483,7 +483,7 @@ class NtlmClient(object):
temp += self.server_target_info_raw
temp += pack('<I', 0) # Z(4)
response_key_nt = self.ntowf_v2()
- nt_proof_str = hmac.new(response_key_nt, self.server_challenge + temp).digest()
+ nt_proof_str = hmac.new(response_key_nt, self.server_challenge + temp, digestmod=hashlib.md5).digest()
nt_challenge_response = nt_proof_str + temp
return nt_challenge_response
@@ -494,4 +494,4 @@ class NtlmClient(object):
password_digest = binascii.unhexlify(passparts[1])
else:
password_digest = hashlib.new('MD4', self._password.encode('utf-16-le')).digest()
- return hmac.new(password_digest, (self.user_name.upper() + self.user_domain).encode('utf-16-le')).digest()
+ return hmac.new(password_digest, (self.user_name.upper() + self.user_domain).encode('utf-16-le'), digestmod=hashlib.md5).digest()
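
Background on the two hmac changes above: starting with Python 3.8, hmac.new() no longer defaults to MD5, so the digest has to be passed explicitly; the previous calls raised TypeError on 3.8. A minimal standalone illustration with placeholder key and message values:

    import hashlib
    import hmac

    key = b'response-key-nt'                   # placeholder for the NTLMv2 response key
    message = b'server-challenge' + b'temp-blob'
    # without digestmod=hashlib.md5 this call fails on Python 3.8+
    nt_proof_str = hmac.new(key, message, digestmod=hashlib.md5).digest()
    print(nt_proof_str.hex())
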
diff --git a/ldap3/utils/port_validators.py b/ldap3/utils/port_validators.py
new file mode 100644
index 0000000..a35e13e
--- /dev/null
+++ b/ldap3/utils/port_validators.py
@@ -0,0 +1,37 @@
+""" Some helper functions for validation of ports and lists of ports. """
+
+
+def check_port(port):
+ """ Check if a port is valid. Return an error message indicating what is invalid if something isn't valid. """
+ if isinstance(port, int):
+ if port not in range(0, 65536):
+ return 'Source port must be in range from 0 to 65535'
+ else:
+ return 'Source port must be an integer'
+ return None
+
+
+def check_port_and_port_list(port, port_list):
+ """ Take in a port and a port list and check that at most one is non-null. Additionally check that if either
+ is non-null, it is valid.
+ Return an error message indicating what is invalid if something isn't valid.
+ """
+ if port is not None and port_list is not None:
+ return 'Cannot specify both a source port and a source port list'
+ elif port is not None:
+ if isinstance(port, int):
+ if port not in range(0, 65535):
+ return 'Source port must in range from 0 to 65535'
+ else:
+ return 'Source port must be an integer'
+ elif port_list is not None:
+ try:
+ _ = iter(port_list)
+ except TypeError:
+ return 'Source port list must be an iterable {}'.format(port_list)
+
+ for candidate_port in port_list:
+ err = check_port(candidate_port)
+ if err:
+ return err
+ return None
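
A quick illustration (not part of the commit) of the new validators; both return None when the input is acceptable and a descriptive error string otherwise.

    from ldap3.utils.port_validators import check_port, check_port_and_port_list

    assert check_port(389) is None
    print(check_port(70000))      # out-of-range port -> error string
    print(check_port('636'))      # non-integer port -> error string
    assert check_port_and_port_list(None, [389, 636]) is None
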
diff --git a/ldap3/utils/repr.py b/ldap3/utils/repr.py
index b5379cd..8ac9d0b 100644
--- a/ldap3/utils/repr.py
+++ b/ldap3/utils/repr.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/utils/tls_backport.py b/ldap3/utils/tls_backport.py
index 8cd2cad..252c141 100644
--- a/ldap3/utils/tls_backport.py
+++ b/ldap3/utils/tls_backport.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/utils/uri.py b/ldap3/utils/uri.py
index 658d1bb..02c8e5a 100644
--- a/ldap3/utils/uri.py
+++ b/ldap3/utils/uri.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/ldap3/version.py b/ldap3/version.py
index f7915ed..531a578 100644
--- a/ldap3/version.py
+++ b/ldap3/version.py
@@ -1,9 +1,9 @@
# THIS FILE IS AUTO-GENERATED. PLEASE DO NOT MODIFY# version file for ldap3
-# generated on 2018-01-21 07:32:14.704404
-# on system uname_result(system='Windows', node='ELITE10GC', release='10', version='10.0.16299', machine='AMD64', processor='Intel64 Family 6 Model 58 Stepping 9, GenuineIntel')
-# with Python 3.6.4 - ('v3.6.4:d48eceb', 'Dec 19 2017 06:54:40') - MSC v.1900 64 bit (AMD64)
+# generated on 2020-03-01 22:10:42.830758
+# on system uname_result(system='Windows', node='ELITE10GC', release='10', version='10.0.18362', machine='AMD64', processor='Intel64 Family 6 Model 58 Stepping 9, GenuineIntel')
+# with Python 3.8.1 - ('tags/v3.8.1:1b293b6', 'Dec 18 2019 23:11:46') - MSC v.1916 64 bit (AMD64)
#
-__version__ = '2.4.1'
+__version__ = '2.7'
__author__ = 'Giovanni Cannata'
__email__ = 'cannatag@gmail.com'
__url__ = 'https://github.com/cannatag/ldap3'
diff --git a/setup.py b/setup.py
index 3686ef0..df99a8a 100644
--- a/setup.py
+++ b/setup.py
@@ -23,8 +23,12 @@
# If not, see <http://www.gnu.org/licenses/>.
import os
-from setuptools import setup
+import glob
+import shutil
from json import load
+from distutils.command.clean import clean
+from distutils import log
+from setuptools import setup
version_dict = load(open('_version.json', 'r'))
version = str(version_dict['version'])
@@ -56,93 +60,26 @@ packages=['ldap3',
setup_kwargs = {'packages': packages,
'package_dir': {'': package_folder}}
-try:
- from Cython.Build import cythonize
- HAS_CYTHON = True
-except ImportError:
- HAS_CYTHON = False
-
-if 'LDAP3_CYTHON_COMPILE' in os.environ and HAS_CYTHON is True:
- import sys
- import multiprocessing
- import multiprocessing.pool
- from setuptools import Extension
- from distutils.command.build_py import build_py
- from distutils.command.build_ext import build_ext
- # Change to source's directory prior to running any command
- try:
- SETUP_DIRNAME = os.path.dirname(__file__)
- except NameError:
- # We're most likely being frozen and __file__ triggered this NameError
- # Let's work around that
- SETUP_DIRNAME = os.path.dirname(sys.argv[0])
- if SETUP_DIRNAME != '':
- os.chdir(SETUP_DIRNAME)
-
- SETUP_DIRNAME = os.path.abspath(SETUP_DIRNAME)
-
- def find_ext():
- for package in ('ldap3',):
- for root, _, files in os.walk(os.path.join(SETUP_DIRNAME, package)):
- commonprefix = os.path.commonprefix([SETUP_DIRNAME, root])
- for filename in files:
- full = os.path.join(root, filename)
- if not filename.endswith(('.py', '.c')):
- continue
- if filename in ('__init__.py',):
- continue
- relpath = os.path.join(root, filename).split(commonprefix)[-1][1:]
- module = os.path.splitext(relpath)[0].replace(os.sep, '.')
- yield Extension(module, [full])
-
- def discover_packages():
- modules = []
- pkg_data = {}
- pkg_dir = {}
- for package in ('ldap3',):
- for root, _, files in os.walk(os.path.join(SETUP_DIRNAME, package)):
- if '__init__.py' not in files:
- continue
- pdir = os.path.relpath(root, SETUP_DIRNAME)
- modname = pdir.replace(os.sep, '.')
- modules.append(modname)
- pkg_data.setdefault(modname, []).append('*.so')
- pkg_dir[modname] = pdir
- return modules, pkg_dir, pkg_data
-
- ext_modules = cythonize(list(find_ext()), nthreads=multiprocessing.cpu_count())
-
-
- class BuildPy(build_py):
-
- def find_package_modules(self, package, package_dir):
- modules = build_py.find_package_modules(self, package, package_dir)
- for package, module, filename in modules:
- if module not in ('__init__',):
- # We only want __init__ python files
- # All others will be built as extensions
- continue
- yield package, module, filename
-
-
- class BuildExt(build_ext):
-
- def run(self):
- self.extensions = ext_modules
- build_ext.run(self)
-
- def build_extensions(self):
- multiprocessing.pool.ThreadPool(
- processes=multiprocessing.cpu_count()).map(
- self.build_extension, self.extensions)
- packages, package_dir, package_data = discover_packages()
- setup_kwargs['packages'] = packages
- setup_kwargs['package_dir'] = package_dir
- setup_kwargs['package_data'] = package_data
- setup_kwargs['cmdclass'] = {'build_py': BuildPy, 'build_ext': BuildExt}
- setup_kwargs['ext_modules'] = ext_modules
- setup_kwargs['zip_safe'] = False
+class Clean(clean):
+ def run(self):
+ clean.run(self)
+ # Let's clean compiled *.py[c,o] *.c *.so
+ for subdir in ('ldap3',):
+ root = os.path.join(os.path.dirname(__file__), subdir)
+ for dirname, dirs, _ in os.walk(root):
+ for to_remove_filename in glob.glob('{0}/*.py[ocx]'.format(dirname)):
+ os.remove(to_remove_filename)
+ for to_remove_filename in glob.glob('{0}/*.c'.format(dirname)):
+ os.remove(to_remove_filename)
+ for to_remove_filename in glob.glob('{0}/*.so'.format(dirname)):
+ os.remove(to_remove_filename)
+ for dir_ in dirs:
+ if dir_ == '__pycache__':
+ shutil.rmtree(os.path.join(dirname, dir_))
+
+
+setup_kwargs['cmdclass'] = {'clean': Clean}
setup(name=package_name,
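
In place of the removed optional Cython build path, the commit registers a custom clean command that removes compiled *.py[ocx], *.c and *.so files and __pycache__ directories under the ldap3 package; it runs with the standard invocation python setup.py clean.
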
diff --git a/test/testAbandonOperation.py b/test/testAbandonOperation.py
index 4368a9d..1a25b38 100644
--- a/test/testAbandonOperation.py
+++ b/test/testAbandonOperation.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testAbstractionAuxiliaryClass.py b/test/testAbstractionAuxiliaryClass.py
new file mode 100644
index 0000000..86bc3ae
--- /dev/null
+++ b/test/testAbstractionAuxiliaryClass.py
@@ -0,0 +1,104 @@
+"""
+"""
+# Created on 2014.01.19
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+from time import sleep
+
+from ldap3 import Writer, Reader, AttrDef, ObjectDef
+from ldap3.core.exceptions import LDAPCursorError
+from test.config import test_base, get_connection, drop_connection, random_id, test_moved, add_user, test_multivalued_attribute, test_server_type
+from ldap3.abstract import STATUS_COMMITTED, STATUS_MANDATORY_MISSING, STATUS_DELETED, STATUS_PENDING_CHANGES, STATUS_READ, \
+ STATUS_READY_FOR_DELETION, STATUS_READY_FOR_MOVING, STATUS_READY_FOR_RENAMING, STATUS_VIRTUAL, STATUS_WRITABLE
+
+testcase_id = ''
+
+
+class Test(unittest.TestCase):
+ def setUp(self):
+ global testcase_id
+ testcase_id = random_id()
+ self.connection = get_connection()
+ self.delete_at_teardown = []
+
+ def tearDown(self):
+ drop_connection(self.connection, self.delete_at_teardown)
+ self.assertFalse(self.connection.bound)
+
+ def test_create_entry_with_attribute_from_auxiliary_class(self):
+ if test_server_type != 'AD':
+ w = Writer(self.connection, 'inetorgperson', auxiliary_class='homeInfo')
+ n = w.new('cn=' + testcase_id + 'new-1,' + test_base)
+ n.sn = 'sn-test-1'
+ n.homeState = 'state-test-1'
+ self.assertEqual(n.entry_status, STATUS_PENDING_CHANGES)
+ n.entry_commit_changes()
+ self.assertEqual(n.sn, 'sn-test-1')
+ self.assertEqual(n.homeState, 'state-test-1')
+ self.assertEqual(n.entry_status, STATUS_COMMITTED)
+ n.entry_delete()
+ self.assertEqual(n.entry_status, STATUS_READY_FOR_DELETION)
+ n.entry_commit_changes()
+ self.assertEqual(n.entry_status, STATUS_DELETED)
+
+ def test_create_invalid_entry_with_attribute_from_auxiliary_class_not_declared(self):
+ w = Writer(self.connection, 'inetorgperson')
+ n = w.new('cn=' + testcase_id + 'new-2,' + test_base)
+ n.sn = 'sn-test-2'
+ with self.assertRaises(LDAPCursorError):
+ n.homeState = 'state-test-2'
+
+ def test_read_entry_with_attribute_from_auxiliary_class(self):
+ if test_server_type != 'AD':
+ w = Writer(self.connection, 'inetorgperson', auxiliary_class='homeInfo')
+ n = w.new('cn=' + testcase_id + 'new-3,' + test_base)
+ n.sn = 'sn-test-3'
+ n.homeState = 'state-test-3'
+ self.assertEqual(n.entry_status, STATUS_PENDING_CHANGES)
+ n.entry_commit_changes()
+ self.assertEqual(n.sn, 'sn-test-3')
+ self.assertEqual(n.homeState, 'state-test-3')
+ self.assertEqual(n.entry_status, STATUS_COMMITTED)
+
+ r = Reader(self.connection, 'inetorgperson', test_base, '(cn=' + testcase_id + 'new-3)', auxiliary_class='homeInfo')
+ r.search()
+ self.assertTrue(r[0].cn, testcase_id + 'new-3')
+ self.assertTrue(r[0].homeState, testcase_id + 'state-test-3')
+
+ def test_read_entry_with_attribute_from_missing_auxiliary_class(self):
+ if test_server_type != 'AD':
+ w = Writer(self.connection, 'inetorgperson', auxiliary_class='homeInfo')
+ n = w.new('cn=' + testcase_id + 'new-4,' + test_base)
+ n.sn = 'sn-test-4'
+ n.homeState = 'state-test-4'
+ self.assertEqual(n.entry_status, STATUS_PENDING_CHANGES)
+ n.entry_commit_changes()
+ self.assertEqual(n.sn, 'sn-test-4')
+ self.assertEqual(n.homeState, 'state-test-4')
+ self.assertEqual(n.entry_status, STATUS_COMMITTED)
+
+ r = Reader(self.connection, 'inetorgperson', test_base, '(cn=' + testcase_id + 'new-4)')
+ r.search()
+ self.assertTrue(r[0].cn, testcase_id + 'new-4')
+ with self.assertRaises(LDAPCursorError):
+ self.assertTrue(r[0].homeState, testcase_id + 'state-test-3')
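
A hedged sketch of the abstraction-layer behaviour the new test exercises: once an auxiliary class is declared on the cursor, its attributes become readable and writable on the entries. Host, base DN and the homeInfo/homeState schema are placeholders taken from the test and must exist on the target server.

    from ldap3 import Connection, Server, Writer

    conn = Connection(Server('ldap://ldap.example.com'), auto_bind=True)
    w = Writer(conn, 'inetorgperson', auxiliary_class='homeInfo')
    entry = w.new('cn=jdoe,ou=people,dc=example,dc=com')
    entry.sn = 'Doe'
    entry.homeState = 'NY'     # allowed only because homeInfo is declared
    entry.entry_commit_changes()
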
diff --git a/test/testAbstractionDefs.py b/test/testAbstractionDefs.py
index 7adde2a..92ef901 100644
--- a/test/testAbstractionDefs.py
+++ b/test/testAbstractionDefs.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testAbstractionDefsFromSchema.py b/test/testAbstractionDefsFromSchema.py
index 0c9243a..1f9364f 100644
--- a/test/testAbstractionDefsFromSchema.py
+++ b/test/testAbstractionDefsFromSchema.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testAbstractionSearch.py b/test/testAbstractionSearch.py
index 8a2f09e..743ab8f 100644
--- a/test/testAbstractionSearch.py
+++ b/test/testAbstractionSearch.py
@@ -4,7 +4,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testAbstractionWrite.py b/test/testAbstractionWrite.py
index 62e155f..ad88cd2 100644
--- a/test/testAbstractionWrite.py
+++ b/test/testAbstractionWrite.py
@@ -4,7 +4,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -177,3 +177,4 @@ class Test(unittest.TestCase):
e.myname += 'xyz'
w.commit()
self.assertTrue('xyz' in e.myname)
+
diff --git a/test/testAddMembersToGroups.py b/test/testAddMembersToGroups.py
index 4164bf1..7ea8ef1 100644
--- a/test/testAddMembersToGroups.py
+++ b/test/testAddMembersToGroups.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testAddOperation.py b/test/testAddOperation.py
index 5ceeac4..40bbb2c 100644
--- a/test/testAddOperation.py
+++ b/test/testAddOperation.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -24,8 +24,10 @@
# If not, see <http://www.gnu.org/licenses/>.
import unittest
+from copy import deepcopy
-from test.config import get_connection, drop_connection, add_user, random_id
+from test.config import get_connection, drop_connection, add_user, random_id, get_add_user_attributes,\
+ test_user_password, generate_dn, test_base
testcase_id = ''
@@ -47,5 +49,23 @@ class Test(unittest.TestCase):
self.assertEqual('success', self.delete_at_teardown[0][1]['description'])
def test_add_bytes(self):
- self.delete_at_teardown.append(add_user(self.connection, testcase_id, 'add-operation-1', test_bytes=True))
+ self.delete_at_teardown.append(add_user(self.connection, testcase_id, 'add-operation-2', test_bytes=True))
+ self.assertEqual('success', self.delete_at_teardown[0][1]['description'])
+
+ def test_unmodified_attributes_dict(self):
+ attributes = get_add_user_attributes(testcase_id, 'add-operation-3', test_user_password)
+ object_class = attributes.pop('objectClass')
+ copy_of_attributes = deepcopy(attributes)
+ dn = generate_dn(test_base, testcase_id, 'add-operation-3')
+ self.connection.add(dn, object_class, attributes)
+ self.connection.delete(dn)
+ self.assertDictEqual(copy_of_attributes, attributes)
+
+ def test_add_binary(self):
+ bin1 = open('512b-rsa-example-cert.der','rb')
+ bin2 = open('1024b-rsa-example-cert.der','rb')
+ der_certificates = [bin1.read(), bin2.read()]
+ bin1.close()
+ bin2.close()
+ self.delete_at_teardown.append(add_user(self.connection, testcase_id, 'add-operation-4', attributes={'userCertificate;binary': der_certificates}))
self.assertEqual('success', self.delete_at_teardown[0][1]['description'])
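
For completeness, a sketch (not from the commit) of what the new test_add_binary case verifies: raw DER bytes can be passed straight through Connection.add() for a ';binary' attribute. File name, DN and server are hypothetical.

    from ldap3 import Connection, Server

    conn = Connection(Server('ldap://ldap.example.com'), auto_bind=True)
    with open('user-cert.der', 'rb') as cert_file:
        der_cert = cert_file.read()
    conn.add('cn=cert-user,ou=people,dc=example,dc=com',
             ['inetOrgPerson'],
             {'cn': 'cert-user', 'sn': 'cert-user',
              'userCertificate;binary': [der_cert]})
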
diff --git a/test/testBindOperation.py b/test/testBindOperation.py
index 658539e..0642e07 100644
--- a/test/testBindOperation.py
+++ b/test/testBindOperation.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testBytesOperation.py b/test/testBytesOperation.py
index 0a8a366..3020d1b 100644
--- a/test/testBytesOperation.py
+++ b/test/testBytesOperation.py
@@ -6,7 +6,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testCaseInsensitiveDictionary.py b/test/testCaseInsensitiveDictionary.py
index e1d5b42..69bfa0c 100644
--- a/test/testCaseInsensitiveDictionary.py
+++ b/test/testCaseInsensitiveDictionary.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testCaseInsensitiveWithAliasDictionary.py b/test/testCaseInsensitiveWithAliasDictionary.py
index a7d27f1..7a225be 100644
--- a/test/testCaseInsensitiveWithAliasDictionary.py
+++ b/test/testCaseInsensitiveWithAliasDictionary.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2017 - 2018 Giovanni Cannata
+# Copyright 2017 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -1012,8 +1012,7 @@ class Test(unittest.TestCase):
self.assertTrue(True)
except Exception:
self.fail('wrong exception')
- else:
- self.fail('double alias')
+
self.assertEqual(cid._store, {'oNe': 1})
self.assertEqual(cid._aliases, {'one-a': 'one'})
self.assertEqual(cid._alias_keymap, {'one': ['one-a']})
@@ -1026,8 +1025,7 @@ class Test(unittest.TestCase):
self.assertTrue(True)
except Exception:
self.fail('wrong exception')
- else:
- self.fail('double alias')
+
self.assertEqual(cid._store, {'oNe': 1})
self.assertEqual(cid._aliases, {'one-a': 'one'})
self.assertEqual(cid._alias_keymap, {'one': ['one-a']})
@@ -1041,8 +1039,7 @@ class Test(unittest.TestCase):
self.assertTrue(True)
except Exception:
self.fail('wrong exception')
- else:
- self.fail('double alias')
+
self.assertEqual(cid._store, {'oNe': 1})
self.assertEqual(cid._aliases, {'one-a': 'one'})
self.assertEqual(cid._alias_keymap, {'one': ['one-a']})
@@ -1056,8 +1053,7 @@ class Test(unittest.TestCase):
self.assertTrue(True)
except Exception:
self.fail('wrong exception')
- else:
- self.fail('double alias')
+
self.assertEqual(cid._store, {'oNe': 1})
self.assertEqual(cid._aliases, {'one-a': 'one'})
self.assertEqual(cid._alias_keymap, {'one': ['one-a']})
diff --git a/test/testCheckGroupMembership.py b/test/testCheckGroupMembership.py
index 0be8631..20584c4 100644
--- a/test/testCheckGroupMembership.py
+++ b/test/testCheckGroupMembership.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testCheckNamesFalse.py b/test/testCheckNamesFalse.py
index 242a678..4715593 100644
--- a/test/testCheckNamesFalse.py
+++ b/test/testCheckNamesFalse.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testCheckNamesTrue.py b/test/testCheckNamesTrue.py
index 80ac964..6976d68 100644
--- a/test/testCheckNamesTrue.py
+++ b/test/testCheckNamesTrue.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testCheckedAttributes.py b/test/testCheckedAttributes.py
index d7ac91b..89bbe4c 100644
--- a/test/testCheckedAttributes.py
+++ b/test/testCheckedAttributes.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testCompareOperation.py b/test/testCompareOperation.py
index 031b2fe..a46af04 100644
--- a/test/testCompareOperation.py
+++ b/test/testCompareOperation.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testConnection.py b/test/testConnection.py
index ed7d8d7..144d1ad 100644
--- a/test/testConnection.py
+++ b/test/testConnection.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testControls.py b/test/testControls.py
index bd71f41..b5f5162 100644
--- a/test/testControls.py
+++ b/test/testControls.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testDeleteOperation.py b/test/testDeleteOperation.py
index 407642a..7f552b4 100644
--- a/test/testDeleteOperation.py
+++ b/test/testDeleteOperation.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testDnParsing.py b/test/testDnParsing.py
index dd1b095..bcc740c 100644
--- a/test/testDnParsing.py
+++ b/test/testDnParsing.py
@@ -1,111 +1,205 @@
-"""
-"""
-
-# Created on 2014.09.15
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2014 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-import unittest
-
-from ldap3.utils.dn import parse_dn as p
-
-
-class Test(unittest.TestCase):
- def test_parse_dn_single(self):
- parsed = p('cn=admin')
- self.assertEqual(len(parsed), 1)
- self.assertEqual(parsed[0], ('cn', 'admin', ''))
-
- def test_parse_dn_double(self):
- parsed = p('cn=user1,o=users')
- self.assertEqual(len(parsed), 2)
- self.assertEqual(parsed[0], ('cn', 'user1', ','))
- self.assertEqual(parsed[1], ('o', 'users', ''))
-
- def test_parse_dn_multi(self):
- parsed = p('cn=user1,ou=users,dc=branch,dc=company,c=IT')
- self.assertEqual(len(parsed), 5)
- self.assertEqual(parsed[0], ('cn', 'user1', ','))
- self.assertEqual(parsed[1], ('ou', 'users', ','))
- self.assertEqual(parsed[2], ('dc', 'branch', ','))
- self.assertEqual(parsed[3], ('dc', 'company', ','))
- self.assertEqual(parsed[4], ('c', 'IT', ''))
-
- def test_parse_dn_multi_type(self):
- parsed = p('cn=user1+sn=surname1,o=users')
- self.assertEqual(len(parsed), 3)
- self.assertEqual(parsed[0], ('cn', 'user1', '+'))
- self.assertEqual(parsed[1], ('sn', 'surname1', ','))
- self.assertEqual(parsed[2], ('o', 'users', ''))
-
- def test_parse_dn_escaped_single(self):
- parsed = p('cn=admi\\,n')
- self.assertEqual(len(parsed), 1)
- self.assertEqual(parsed[0], ('cn', 'admi\\,n', ''))
-
- def test_parse_dn_escaped_double(self):
- parsed = p('cn=us\\=er1,o=us\\,ers')
- self.assertEqual(len(parsed), 2)
- self.assertEqual(parsed[0], ('cn', 'us\\=er1', ','))
- self.assertEqual(parsed[1], ('o', 'us\\,ers', ''))
-
- def test_parse_dn_escaped_multi(self):
- parsed = p('cn=us\\,er1,ou=us\\08ers,dc=br\\,anch,dc=company,c=IT')
- self.assertEqual(len(parsed), 5)
- self.assertEqual(parsed[0], ('cn', 'us\\,er1', ','))
- self.assertEqual(parsed[1], ('ou', 'us\\08ers', ','))
- self.assertEqual(parsed[2], ('dc', 'br\\,anch', ','))
- self.assertEqual(parsed[3], ('dc', 'company', ','))
- self.assertEqual(parsed[4], ('c', 'IT', ''))
-
- def test_parse_dn_escaped_multi_type(self):
- parsed = p('cn=us\\+er1+sn=su\\,rname1,o=users')
- self.assertEqual(len(parsed), 3)
- self.assertEqual(parsed[0], ('cn', 'us\\+er1', '+'))
- self.assertEqual(parsed[1], ('sn', 'su\\,rname1', ','))
- self.assertEqual(parsed[2], ('o', 'users', ''))
-
- def test_parse_dn_unescaped_single(self):
- parsed = p('cn=admi,n', escape=True)
- self.assertEqual(len(parsed), 1)
- self.assertEqual(parsed[0], ('cn', 'admi\\,n', ''))
-
- def test_parse_dn_unescaped_double(self):
- parsed = p('cn=us=er1,o=us,ers', escape=True)
- self.assertEqual(len(parsed), 2)
- self.assertEqual(parsed[0], ('cn', 'us\\=er1', ','))
- self.assertEqual(parsed[1], ('o', 'us\\,ers', ''))
-
- def test_parse_dn_unescaped_multi(self):
- parsed = p('cn=us,er1,ou=use<rs,dc=br+anch,dc=company,c=IT', escape=True)
- self.assertEqual(len(parsed), 5)
- self.assertEqual(parsed[0], ('cn', 'us\\,er1', ','))
- self.assertEqual(parsed[1], ('ou', 'use\\<rs', ','))
- self.assertEqual(parsed[2], ('dc', 'br\\+anch', ','))
- self.assertEqual(parsed[3], ('dc', 'company', ','))
- self.assertEqual(parsed[4], ('c', 'IT', ''))
-
- def test_parse_dn_unescaped_multi_type(self):
- parsed = p('cn=us+er1+sn=su,rname1,o=users', escape=True)
- self.assertEqual(len(parsed), 3)
- self.assertEqual(parsed[0], ('cn', 'us\\+er1', '+'))
- self.assertEqual(parsed[1], ('sn', 'su\\,rname1', ','))
- self.assertEqual(parsed[2], ('o', 'users', ''))
+"""
+"""
+
+# Created on 2014.09.15
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2014 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+
+from ldap3.utils.dn import parse_dn as p
+
+
+class Test(unittest.TestCase):
+ def test_parse_dn_single(self):
+ parsed = p('cn=admin')
+ self.assertEqual(len(parsed), 1)
+ self.assertEqual(parsed[0], ('cn', 'admin', ''))
+
+ def test_parse_dn_single_multi_rdn(self):
+ parsed = p('cn=admin+email=admin@site.com')
+ self.assertEqual(len(parsed), 2)
+ self.assertEqual(parsed[0], ('cn', 'admin', '+'))
+ self.assertEqual(parsed[1], ('email', 'admin@site.com', ''))
+
+ def test_parse_dn_escaped_single_multi_rdn(self):
+ parsed = p('cn=\\\\\\+admin+email=admin@site.com')
+ self.assertEqual(len(parsed), 2)
+ self.assertEqual(parsed[0], ('cn', '\\\\\\+admin', '+'))
+ self.assertEqual(parsed[1], ('email', 'admin@site.com', ''))
+
+ def test_parse_dn_double(self):
+ parsed = p('cn=user1,o=users')
+ self.assertEqual(len(parsed), 2)
+ self.assertEqual(parsed[0], ('cn', 'user1', ','))
+ self.assertEqual(parsed[1], ('o', 'users', ''))
+
+ def test_parse_dn_multi(self):
+ parsed = p('cn=user1,ou=users,dc=branch,dc=company,c=IT')
+ self.assertEqual(len(parsed), 5)
+ self.assertEqual(parsed[0], ('cn', 'user1', ','))
+ self.assertEqual(parsed[1], ('ou', 'users', ','))
+ self.assertEqual(parsed[2], ('dc', 'branch', ','))
+ self.assertEqual(parsed[3], ('dc', 'company', ','))
+ self.assertEqual(parsed[4], ('c', 'IT', ''))
+
+ def test_parse_dn_multi_type(self):
+ parsed = p('cn=user1+sn=surname1,o=users')
+ self.assertEqual(len(parsed), 3)
+ self.assertEqual(parsed[0], ('cn', 'user1', '+'))
+ self.assertEqual(parsed[1], ('sn', 'surname1', ','))
+ self.assertEqual(parsed[2], ('o', 'users', ''))
+
+ def test_parse_dn_escaped_single(self):
+ parsed = p('cn=admi\\,n')
+ self.assertEqual(len(parsed), 1)
+ self.assertEqual(parsed[0], ('cn', 'admi\\,n', ''))
+
+ def test_parse_dn_escaped_double(self):
+ parsed = p('cn=us\\=er1,o=us\\,ers')
+ self.assertEqual(len(parsed), 2)
+ self.assertEqual(parsed[0], ('cn', 'us\\=er1', ','))
+ self.assertEqual(parsed[1], ('o', 'us\\,ers', ''))
+
+ def test_parse_dn_escaped_double_1(self):
+ parsed = p('cn=\\\\,o=\\\\')
+ self.assertEqual(len(parsed), 2)
+ self.assertEqual(parsed[0], ('cn', '\\\\', ','))
+ self.assertEqual(parsed[1], ('o', '\\\\', ''))
+
+ def test_parse_dn_escaped_multi(self):
+ parsed = p('cn=us\\,er1,ou=us\\08ers,dc=br\\,anch,dc=company,c=IT')
+ self.assertEqual(len(parsed), 5)
+ self.assertEqual(parsed[0], ('cn', 'us\\,er1', ','))
+ self.assertEqual(parsed[1], ('ou', 'us\\08ers', ','))
+ self.assertEqual(parsed[2], ('dc', 'br\\,anch', ','))
+ self.assertEqual(parsed[3], ('dc', 'company', ','))
+ self.assertEqual(parsed[4], ('c', 'IT', ''))
+
+ def test_parse_dn_escaped_multi_type(self):
+ parsed = p('cn=us\\+er1+sn=su\\,rname1,o=users')
+ self.assertEqual(len(parsed), 3)
+ self.assertEqual(parsed[0], ('cn', 'us\\+er1', '+'))
+ self.assertEqual(parsed[1], ('sn', 'su\\,rname1', ','))
+ self.assertEqual(parsed[2], ('o', 'users', ''))
+
+ def test_parse_dn_unescaped_single(self):
+ parsed = p('cn=admi,n', escape=True)
+ self.assertEqual(len(parsed), 1)
+ self.assertEqual(parsed[0], ('cn', 'admi\\,n', ''))
+
+ def test_parse_dn_unescaped_double(self):
+ parsed = p('cn=us=er1,o=us,ers', escape=True)
+ self.assertEqual(len(parsed), 2)
+ self.assertEqual(parsed[0], ('cn', 'us\\=er1', ','))
+ self.assertEqual(parsed[1], ('o', 'us\\,ers', ''))
+
+ def test_parse_dn_unescaped_multi(self):
+ parsed = p('cn=us,er1,ou=use<rs,dc=br+anch,dc=company,c=IT', escape=True)
+ self.assertEqual(len(parsed), 5)
+ self.assertEqual(parsed[0], ('cn', 'us\\,er1', ','))
+ self.assertEqual(parsed[1], ('ou', 'use\\<rs', ','))
+ self.assertEqual(parsed[2], ('dc', 'br\\+anch', ','))
+ self.assertEqual(parsed[3], ('dc', 'company', ','))
+ self.assertEqual(parsed[4], ('c', 'IT', ''))
+
+ def test_parse_dn_unescaped_multi_type(self):
+ parsed = p('cn=us+er1+sn=su,rname1,o=users', escape=True)
+ self.assertEqual(len(parsed), 3)
+ self.assertEqual(parsed[0], ('cn', 'us\\+er1', '+'))
+ self.assertEqual(parsed[1], ('sn', 'su\\,rname1', ','))
+ self.assertEqual(parsed[2], ('o', 'users', ''))
+
+ def pair_generator(self):
+ # escaped = DQUOTE / PLUS / COMMA / SEMI / LANGLE / RANGLE
+ escaped = ['"', '+', ',', ';', '<', '>']
+ # special = escaped / SPACE / SHARP / EQUALS
+ special = escaped + [' ', '#', '=']
+ ESC = ['\\']
+ # hexpair = HEX HEX
+ hexpairs = ['00', '99', 'aa', 'AA', 'ff', 'FF', 'aF', 'Af']
+ # pair = ESC ( ESC / special / hexpair )
+ pair = self.combine_strings(ESC, ESC + special + hexpairs)
+ return pair
+
+ def test_parse_dn_pair(self):
+ if hasattr(self, 'subTest'): # python3 only
+ for onepair in self.pair_generator():
+ for attributeValue, mode in [
+ (onepair, "alone"),
+ ("a" + onepair + "b", "between"),
+ ("a" + onepair, "after"),
+ (onepair + "b", "before")
+ ]:
+ for separator in [None, '+', ',']:
+
+ if not separator:
+ dn = r'cn={0}'.format(attributeValue)
+ expected = [('cn', attributeValue, '')]
+ else:
+ dn = r'cn={0}{1}ou={0}'.format(attributeValue, separator)
+ expected = [('cn', attributeValue, separator), ('ou', attributeValue, '')]
+
+ with self.subTest(pair=onepair, separator=separator, mode=mode, input=dn):
+ self._test_parse_dn(
+ dn,
+ expected
+ )
+
+
+ def combine_strings(self, *args):
+        if len(args) == 0: raise ValueError("combine_strings requires at least one list of strings")
+ if len(args) == 1:
+ for variant in args[0]:
+ yield variant
+ else:
+ for head in args[0]:
+ for tail in self.combine_strings(*args[1:]):
+ variant = head + tail
+ yield variant
+
+ def test_combine_strings(self):
+ variants = set(self.combine_strings(['a', 'b'], ['x', 'y']))
+ self.assertEqual(variants, {'ax', 'ay', 'bx', 'by'})
+
+ def test_combine_strings_empty1(self):
+ variants = set(self.combine_strings([], ['x', 'y']))
+ self.assertEqual(len(variants), 0)
+
+ def test_combine_strings_empty2(self):
+ variants = set(self.combine_strings(['a', 'b'], []))
+ self.assertEqual(len(variants), 0)
+
+ def _test_parse_dn(self, input, expected):
+ parsed = p(input, escape=False)
+ self.assertEqual(parsed, expected)
+
+ def test_unit_test_deep_equality(self):
+ self.assertEqual([], [])
+ self.assertNotEqual([], [()])
+ self.assertEqual([()], [()])
+ self.assertNotEqual([()], [(), ()])
+ self.assertNotEqual([()], [('b')])
+ self.assertEqual([('a')], [('a')])
+ self.assertNotEqual([('a')], [('b')])
+ self.assertEqual([('a', 'b', 'x'), ('a', 'b', 'x')], [('a', 'b', 'x'), ('a', 'b', 'x')])
+ self.assertNotEqual([('a', 'b', 'x'), ('a', 'b', 'x')], [('a', 'b', 'x'), ('a', 'b', 'y')])
+ self.assertNotEqual([('1')], [(1)])
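
The new tests above document the tuple format returned by parse_dn. A minimal sketch of that behaviour, with arbitrary example DNs:

    from ldap3.utils.dn import parse_dn

    # each component is an (attribute, value, separator) tuple: ',' between RDNs,
    # '+' inside a multi-valued RDN, '' after the last component
    print(parse_dn('cn=user1+sn=surname1,o=users'))
    # [('cn', 'user1', '+'), ('sn', 'surname1', ','), ('o', 'users', '')]

    # with escape=True, unescaped special characters are escaped instead of rejected
    print(parse_dn('cn=admi,n', escape=True))
    # [('cn', 'admi\\,n', '')]
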
diff --git a/test/testExceptions.py b/test/testExceptions.py
index 7e21f2b..1518cfa 100644
--- a/test/testExceptions.py
+++ b/test/testExceptions.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testExtendedOperations.py b/test/testExtendedOperations.py
index bce4e63..891d620 100644
--- a/test/testExtendedOperations.py
+++ b/test/testExtendedOperations.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -84,7 +84,7 @@ class Test(unittest.TestCase):
def test_novell_list_replicas(self):
if test_server_type == 'EDIR' and not self.connection.strategy.no_real_dsa:
result = self.connection.extend.novell.list_replicas('cn=' + test_server_edir_name + ',' + test_server_context)
- self.assertEqual(result, None)
+ self.assertEqual(result, ['', 'ou=referrals,o=test'])
def test_novell_replica_info(self):
if test_server_type == 'EDIR' and not self.connection.strategy.no_real_dsa:
diff --git a/test/testExtensions.py b/test/testExtensions.py
index cc21059..e224b4b 100644
--- a/test/testExtensions.py
+++ b/test/testExtensions.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testFormatGeneralizedTime.py b/test/testFormatGeneralizedTime.py
index 4e51ee5..6770cd1 100644
--- a/test/testFormatGeneralizedTime.py
+++ b/test/testFormatGeneralizedTime.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testLDIF-change.py b/test/testLDIF-change.py
index d1718e2..16e3bcd 100644
--- a/test/testLDIF-change.py
+++ b/test/testLDIF-change.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testLDIF-content.py b/test/testLDIF-content.py
index b5a348a..781efc5 100644
--- a/test/testLDIF-content.py
+++ b/test/testLDIF-content.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testMicrosoftAD.py b/test/testMicrosoftAD.py
index c2054bf..53f54fa 100644
--- a/test/testMicrosoftAD.py
+++ b/test/testMicrosoftAD.py
@@ -6,7 +6,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -88,9 +88,9 @@ class Test(unittest.TestCase):
self.assertTrue(found)
def test_dir_sync(self):
- if False: # takes a long long time to complete
- # if test_server_type == 'AD':
- sync = self.connection.extend.microsoft.dir_sync(test_root_partition, attributes=['*'])
+ # if False: # takes a long long time to complete
+ if test_server_type == 'AD':
+ sync = self.connection.extend.microsoft.dir_sync(test_root_partition, attributes=['*'], incremental_values=True)
# read all previous changes
while sync.more_results:
print('PREV', len(sync.loop()))
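
The hunk above re-enables the DirSync test and adds incremental_values=True. A sketch of the polling loop it drives; the server, credentials and base DN are placeholders for an Active Directory environment:

    from ldap3 import Server, Connection, ALL

    connection = Connection(Server('ad.example.com', get_info=ALL),
                            'EXAMPLE\\administrator', 'secret', auto_bind=True)
    sync = connection.extend.microsoft.dir_sync('dc=example,dc=com',
                                                attributes=['*'],
                                                incremental_values=True)
    while sync.more_results:            # drain every change accumulated so far
        for entry in sync.loop():
            print(entry['dn'])
    # later calls to sync.loop() return only entries changed since the previous call
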
diff --git a/test/testMockASyncStrategy.py b/test/testMockASyncStrategy.py
index a0a8de8..716febc 100644
--- a/test/testMockASyncStrategy.py
+++ b/test/testMockASyncStrategy.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -26,11 +26,10 @@
import unittest
-from ldap3 import Server, Connection, MOCK_ASYNC, MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, OFFLINE_EDIR_8_8_8,\
- BASE, LEVEL, SUBTREE, AUTO_BIND_NO_TLS, NONE
+from ldap3 import Server, Connection, MOCK_ASYNC, MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, OFFLINE_EDIR_9_1_4, BASE, LEVEL, SUBTREE, AUTO_BIND_NO_TLS, NONE
from ldap3.core.exceptions import LDAPInvalidCredentialsResult, LDAPNoSuchObjectResult
from ldap3.protocol.rfc4512 import SchemaInfo, DsaInfo
-from ldap3.protocol.schemas.edir888 import edir_8_8_8_dsa_info, edir_8_8_8_schema
+from ldap3.protocol.schemas.edir914 import edir_9_1_4_schema, edir_9_1_4_dsa_info
from test.config import random_id
testcase_id = ''
@@ -41,13 +40,13 @@ class Test(unittest.TestCase):
global testcase_id
testcase_id = random_id()
# The mock server can be defined in two different ways, so tests are duplicated, connection_3 is without schema
- schema = SchemaInfo.from_json(edir_8_8_8_schema)
- info = DsaInfo.from_json(edir_8_8_8_dsa_info, schema)
+ schema = SchemaInfo.from_json(edir_9_1_4_schema)
+ info = DsaInfo.from_json(edir_9_1_4_dsa_info, schema)
server_1 = Server.from_definition('MockSyncServer', info, schema)
self.connection_1 = Connection(server_1, user='cn=user1,ou=test,o=lab', password='test1111', client_strategy=MOCK_ASYNC)
self.connection_1b = Connection(server_1, user='cn=user1,ou=test,o=lab', password='test1111', client_strategy=MOCK_ASYNC)
self.connection_1c = Connection(server_1, user='cn=user1,ou=test,o=lab', password='test1111', client_strategy=MOCK_ASYNC, raise_exceptions=True)
- server_2 = Server('dummy', get_info=OFFLINE_EDIR_8_8_8)
+ server_2 = Server('dummy', get_info=OFFLINE_EDIR_9_1_4)
self.connection_2 = Connection(server_2, user='cn=user2,ou=test,o=lab', password='test2222', client_strategy=MOCK_ASYNC)
self.connection_2b = Connection(server_2, user='cn=user2,ou=test,o=lab', password='test2222', client_strategy=MOCK_ASYNC)
self.connection_2c = Connection(server_2, user='cn=user2,ou=test,o=lab', password='test2222', client_strategy=MOCK_ASYNC, raise_exceptions=True)
diff --git a/test/testMockBase.py b/test/testMockBase.py
index 21b377a..ac6d821 100644
--- a/test/testMockBase.py
+++ b/test/testMockBase.py
@@ -2,7 +2,7 @@
#
# Author: Giovanni Cannata & signedbit
#
-# Copyright 2016 - 2018 Giovanni Cannata & signedbit
+# Copyright 2016 - 2020 Giovanni Cannata & signedbit
#
# This file is part of ldap3.
#
@@ -24,14 +24,15 @@ import unittest
from ldap3 import SchemaInfo, DsaInfo, Server, Connection, MOCK_SYNC
from ldap3.operation import search
-from ldap3.protocol.schemas.edir888 import edir_8_8_8_schema, edir_8_8_8_dsa_info
+from ldap3.core.exceptions import LDAPSizeLimitExceededResult
+from ldap3.protocol.schemas.edir914 import edir_9_1_4_schema, edir_9_1_4_dsa_info
class Test(unittest.TestCase):
@classmethod
def setUpClass(cls):
- cls.schema = SchemaInfo.from_json(edir_8_8_8_schema)
- info = DsaInfo.from_json(edir_8_8_8_dsa_info, cls.schema)
+ cls.schema = SchemaInfo.from_json(edir_9_1_4_schema)
+ info = DsaInfo.from_json(edir_9_1_4_dsa_info, cls.schema)
cls.server = Server.from_definition('MockSyncServer', info, cls.schema)
cls.connection = Connection(cls.server, user='cn=user1,ou=test', password='test1', client_strategy=MOCK_SYNC)
@@ -62,7 +63,17 @@ class Test(unittest.TestCase):
self.assertEqual(actual, expected)
+ # def test_raises_size_limit_exceeded_exception(self):
+ # connection = Connection(self.server, user='cn=user1,ou=test', password='test1', client_strategy=MOCK_SYNC, raise_exceptions=True)
+ # # create fixtures
+ # connection.strategy.add_entry('cn=user1,ou=test', {'userPassword': 'test1', 'revision': 1})
+ # connection.strategy.add_entry('cn=user2,ou=test', {'userPassword': 'test2', 'revision': 2})
+ # connection.strategy.add_entry('cn=user3,ou=test', {'userPassword': 'test3', 'revision': 3})
+ # connection.bind()
+ # with self.assertRaises(LDAPSizeLimitExceededResult):
+ # connection.search('ou=test', '(cn=*)', size_limit=1)
+
def _evaluate_filter(self, search_filter):
- filter_root = search.parse_filter(search_filter, self.schema, auto_escape=True, auto_encode=False, check_names=False)
+ filter_root = search.parse_filter(search_filter, self.schema, auto_escape=True, auto_encode=False, validator=None, check_names=False)
candidates = self.server.dit
return self.connection.strategy.evaluate_filter_node(filter_root, candidates)
diff --git a/test/testMockSyncStrategy.py b/test/testMockSyncStrategy.py
index 654f624..6b88683 100644
--- a/test/testMockSyncStrategy.py
+++ b/test/testMockSyncStrategy.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -26,11 +26,11 @@
import unittest
-from ldap3 import Server, Connection, MOCK_SYNC, MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, OFFLINE_EDIR_8_8_8,\
+from ldap3 import Server, Connection, MOCK_SYNC, MODIFY_ADD, MODIFY_REPLACE, MODIFY_DELETE, OFFLINE_EDIR_9_1_4,\
BASE, LEVEL, SUBTREE, AUTO_BIND_NO_TLS, NONE
from ldap3.core.exceptions import LDAPInvalidCredentialsResult, LDAPNoSuchObjectResult
from ldap3.protocol.rfc4512 import SchemaInfo, DsaInfo
-from ldap3.protocol.schemas.edir888 import edir_8_8_8_dsa_info, edir_8_8_8_schema
+from ldap3.protocol.schemas.edir914 import edir_9_1_4_dsa_info, edir_9_1_4_schema
from test.config import random_id
testcase_id = ''
@@ -41,13 +41,13 @@ class Test(unittest.TestCase):
global testcase_id
testcase_id = random_id()
# The mock server can be defined in two different ways, so tests are duplicated, connection_3 is without schema
- schema = SchemaInfo.from_json(edir_8_8_8_schema)
- info = DsaInfo.from_json(edir_8_8_8_dsa_info, schema)
+ schema = SchemaInfo.from_json(edir_9_1_4_schema)
+ info = DsaInfo.from_json(edir_9_1_4_dsa_info, schema)
server_1 = Server.from_definition('MockSyncServer', info, schema)
self.connection_1 = Connection(server_1, user='cn=user1,ou=test,o=lab', password='test1111', client_strategy=MOCK_SYNC)
self.connection_1b = Connection(server_1, user='cn=user1,ou=test,o=lab', password='test1111', client_strategy=MOCK_SYNC)
self.connection_1c = Connection(server_1, user='cn=user1,ou=test,o=lab', password='test1111', client_strategy=MOCK_SYNC, raise_exceptions=True)
- server_2 = Server('dummy', get_info=OFFLINE_EDIR_8_8_8)
+ server_2 = Server('dummy', get_info=OFFLINE_EDIR_9_1_4)
self.connection_2 = Connection(server_2, user='cn=user2,ou=test,o=lab', password='test2222', client_strategy=MOCK_SYNC)
self.connection_2b = Connection(server_2, user='cn=user2,ou=test,o=lab', password='test2222', client_strategy=MOCK_SYNC)
self.connection_2c = Connection(server_2, user='cn=user2,ou=test,o=lab', password='test2222', client_strategy=MOCK_SYNC, raise_exceptions=True)
@@ -56,21 +56,21 @@ class Test(unittest.TestCase):
self.connection_3b = Connection(server_3, user='cn=user3,ou=test,o=lab', password='test3333', client_strategy=MOCK_SYNC)
self.connection_3c = Connection(server_3, user='cn=user3,ou=test,o=lab', password='test3333', client_strategy=MOCK_SYNC, raise_exceptions=True)
# creates fixtures
- self.connection_1.strategy.add_entry('cn=user0,o=lab', {'userPassword': 'test0000', 'sn': 'user0_sn', 'revision': 0})
- self.connection_2.strategy.add_entry('cn=user0,o=lab', {'userPassword': 'test0000', 'sn': 'user0_sn', 'revision': 0})
- self.connection_3.strategy.add_entry('cn=user0,o=lab', {'userPassword': 'test0000', 'sn': 'user0_sn', 'revision': 0})
- self.connection_1.strategy.add_entry('cn=user1,ou=test,o=lab', {'userPassword': 'test1111', 'sn': 'user1_sn', 'revision': 1})
- self.connection_2.strategy.add_entry('cn=user1,ou=test,o=lab', {'userPassword': 'test1111', 'sn': 'user1_sn', 'revision': 1})
- self.connection_3.strategy.add_entry('cn=user1,ou=test,o=lab', {'userPassword': 'test1111', 'sn': 'user1_sn', 'revision': 1})
- self.connection_1.strategy.add_entry('cn=user2,ou=test,o=lab', {'userPassword': 'test2222', 'sn': 'user2_sn', 'revision': 2})
- self.connection_2.strategy.add_entry('cn=user2,ou=test,o=lab', {'userPassword': 'test2222', 'sn': 'user2_sn', 'revision': 2})
- self.connection_3.strategy.add_entry('cn=user2,ou=test,o=lab', {'userPassword': 'test2222', 'sn': 'user2_sn', 'revision': 2})
- self.connection_1.strategy.add_entry('cn=user3,ou=test,o=lab', {'userPassword': 'test3333', 'sn': 'user3_sn', 'revision': 3})
- self.connection_2.strategy.add_entry('cn=user3,ou=test,o=lab', {'userPassword': 'test3333', 'sn': 'user3_sn', 'revision': 3})
- self.connection_3.strategy.add_entry('cn=user3,ou=test,o=lab', {'userPassword': 'test3333', 'sn': 'user3_sn', 'revision': 3})
- self.connection_1.strategy.add_entry('cn=user4,ou=test,o=lab', {'userPassword': 'test4444', 'sn': 'user4_sn', 'revision': 4, 'title': ['title1', 'title2', 'title3']})
- self.connection_2.strategy.add_entry('cn=user4,ou=test,o=lab', {'userPassword': 'test4444', 'sn': 'user4_sn', 'revision': 4, 'title': ['title1', 'title2', 'title3']})
- self.connection_3.strategy.add_entry('cn=user4,ou=test,o=lab', {'userPassword': 'test4444', 'sn': 'user4_sn', 'revision': 4, 'title': ['title1', 'title2', 'title3']})
+ self.connection_1.strategy.add_entry('cn=user0,o=lab', {'userPassword': 'test0000', 'sn': 'user0_sn', 'revision': 0, 'guid': '07039e68-4373-264d-a0a7-000000000000'})
+ self.connection_2.strategy.add_entry('cn=user0,o=lab', {'userPassword': 'test0000', 'sn': 'user0_sn', 'revision': 0, 'guid': '07039e68-4373-264d-a0a7-000000000000'})
+ self.connection_3.strategy.add_entry('cn=user0,o=lab', {'userPassword': 'test0000', 'sn': 'user0_sn', 'revision': 0, 'guid': '07039e68-4373-264d-a0a7-000000000000'})
+ self.connection_1.strategy.add_entry('cn=user1,ou=test,o=lab', {'userPassword': 'test1111', 'sn': 'user1_sn', 'revision': 1, 'guid': '07039e68-4373-264d-a0a7-111111111111'})
+ self.connection_2.strategy.add_entry('cn=user1,ou=test,o=lab', {'userPassword': 'test1111', 'sn': 'user1_sn', 'revision': 1, 'guid': '07039e68-4373-264d-a0a7-111111111111'})
+ self.connection_3.strategy.add_entry('cn=user1,ou=test,o=lab', {'userPassword': 'test1111', 'sn': 'user1_sn', 'revision': 1, 'guid': '07039e68-4373-264d-a0a7-111111111111'})
+ self.connection_1.strategy.add_entry('cn=user2,ou=test,o=lab', {'userPassword': 'test2222', 'sn': 'user2_sn', 'revision': 2, 'guid': '07039e68-4373-264d-a0a7-222222222222'})
+ self.connection_2.strategy.add_entry('cn=user2,ou=test,o=lab', {'userPassword': 'test2222', 'sn': 'user2_sn', 'revision': 2, 'guid': '07039e68-4373-264d-a0a7-222222222222'})
+ self.connection_3.strategy.add_entry('cn=user2,ou=test,o=lab', {'userPassword': 'test2222', 'sn': 'user2_sn', 'revision': 2, 'guid': '07039e68-4373-264d-a0a7-222222222222'})
+ self.connection_1.strategy.add_entry('cn=user3,ou=test,o=lab', {'userPassword': 'test3333', 'sn': 'user3_sn', 'revision': 3, 'guid': '07039e68-4373-264d-a0a7-333333333333'})
+ self.connection_2.strategy.add_entry('cn=user3,ou=test,o=lab', {'userPassword': 'test3333', 'sn': 'user3_sn', 'revision': 3, 'guid': '07039e68-4373-264d-a0a7-333333333333'})
+ self.connection_3.strategy.add_entry('cn=user3,ou=test,o=lab', {'userPassword': 'test3333', 'sn': 'user3_sn', 'revision': 3, 'guid': '07039e68-4373-264d-a0a7-333333333333'})
+ self.connection_1.strategy.add_entry('cn=user4,ou=test,o=lab', {'userPassword': 'test4444', 'sn': 'user4_sn', 'revision': 4, 'title': ['title1', 'title2', 'title3'], 'guid': '07039e68-4373-264d-a0a7-444444444444'})
+ self.connection_2.strategy.add_entry('cn=user4,ou=test,o=lab', {'userPassword': 'test4444', 'sn': 'user4_sn', 'revision': 4, 'title': ['title1', 'title2', 'title3'], 'guid': '07039e68-4373-264d-a0a7-444444444444'})
+ self.connection_3.strategy.add_entry('cn=user4,ou=test,o=lab', {'userPassword': 'test4444', 'sn': 'user4_sn', 'revision': 4, 'title': ['title1', 'title2', 'title3'], 'guid': '07039e68-4373-264d-a0a7-444444444444'})
def tearDown(self):
self.connection_1.unbind()
@@ -954,6 +954,42 @@ class Test(unittest.TestCase):
self.assertEqual(result['description'], 'success')
self.assertEqual('user3', response[0]['attributes']['cn'][0])
+ def test_search_exact_match_single_binary_attribute_1(self):
+ self.connection_1.bind()
+ result = self.connection_1.search('o=lab', '(guid=07039e68-4373-264d-a0a7-111111111111)', search_scope=SUBTREE, attributes=['cn', 'guid'])
+ response = self.connection_1.response
+ if not self.connection_1.strategy.sync:
+ response, result = self.connection_1.get_response(result)
+ else:
+ result = self.connection_1.result
+ self.assertEqual(result['description'], 'success')
+ self.assertEqual('user1', response[0]['attributes']['cn'][0])
+ self.assertEqual('07039e68-4373-264d-a0a7-111111111111', response[0]['attributes']['guid'])
+
+ def test_search_exact_match_single_binary_attribute_2(self):
+ self.connection_2.bind()
+ result = self.connection_2.search('o=lab', '(guid=07039e68-4373-264d-a0a7-222222222222)', search_scope=SUBTREE, attributes=['cn', 'guid'])
+ response = self.connection_2.response
+ if not self.connection_2.strategy.sync:
+ response, result = self.connection_2.get_response(result)
+ else:
+ result = self.connection_2.result
+ self.assertEqual(result['description'], 'success')
+ self.assertEqual('user2', response[0]['attributes']['cn'][0])
+ self.assertEqual('07039e68-4373-264d-a0a7-222222222222', response[0]['attributes']['guid'])
+
+ def test_search_exact_match_single_binary_attribute_3(self):
+ self.connection_3.bind()
+ result = self.connection_3.search('o=lab', '(guid=07039e68-4373-264d-a0a7-333333333333)', search_scope=SUBTREE, attributes=['cn', 'guid'])
+ response = self.connection_3.response
+ if not self.connection_3.strategy.sync:
+ response, result = self.connection_3.get_response(result)
+ else:
+ result = self.connection_3.result
+ self.assertEqual(result['description'], 'success')
+ self.assertEqual('user3', response[0]['attributes']['cn'][0])
+ self.assertEqual('07039e68-4373-264d-a0a7-333333333333', response[0]['attributes']['guid'][0])
+
def test_search_exact_match_case_insensitive_single_attribute_1(self):
self.connection_1.bind()
result = self.connection_1.search('o=lab', '(cn=UsEr1)', search_scope=SUBTREE, attributes=['cn', 'sn'])
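
These mock-strategy tests now build their fixtures from the bundled eDirectory 9.1.4 offline schema. A condensed sketch of the same setup, with arbitrary user names and passwords:

    from ldap3 import Server, Connection, MOCK_SYNC, SUBTREE, OFFLINE_EDIR_9_1_4

    server = Server('dummy', get_info=OFFLINE_EDIR_9_1_4)
    conn = Connection(server, user='cn=user1,ou=test,o=lab', password='test1111',
                      client_strategy=MOCK_SYNC)
    # entries, including the bind user, are injected directly into the mock DIT
    conn.strategy.add_entry('cn=user1,ou=test,o=lab',
                            {'userPassword': 'test1111', 'sn': 'user1_sn', 'revision': 1})
    conn.bind()
    conn.search('o=lab', '(sn=user1_sn)', search_scope=SUBTREE, attributes=['cn', 'sn'])
    print(conn.response[0]['attributes']['cn'])
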
diff --git a/test/testModifyDNOperation.py b/test/testModifyDNOperation.py
index d69a88e..0d4946a 100644
--- a/test/testModifyDNOperation.py
+++ b/test/testModifyDNOperation.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testModifyOperation.py b/test/testModifyOperation.py
index c4d94b9..90898db 100644
--- a/test/testModifyOperation.py
+++ b/test/testModifyOperation.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testOfflineSchema.py b/test/testOfflineSchema.py
index 62404dd..4a1b7d2 100644
--- a/test/testOfflineSchema.py
+++ b/test/testOfflineSchema.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -25,14 +25,14 @@
import unittest
-from ldap3 import Server, OFFLINE_EDIR_8_8_8, SchemaInfo, DsaInfo
+from ldap3 import Server, OFFLINE_EDIR_9_1_4, SchemaInfo, DsaInfo
from ldap3.protocol.rfc4512 import ObjectClassInfo, AttributeTypeInfo
from test.config import test_server, get_connection, drop_connection
class Test(unittest.TestCase):
def setUp(self):
- self.connection = get_connection(get_info=OFFLINE_EDIR_8_8_8)
+ self.connection = get_connection(get_info=OFFLINE_EDIR_9_1_4)
def tearDown(self):
drop_connection(self.connection)
diff --git a/test/testParseSearchFilter.py b/test/testParseSearchFilter.py
index db133f2..7a2ba19 100644
--- a/test/testParseSearchFilter.py
+++ b/test/testParseSearchFilter.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -27,27 +27,27 @@ import unittest
from ldap3.operation.search import parse_filter, MATCH_EQUAL, MATCH_EXTENSIBLE
from ldap3.utils.conv import escape_filter_chars
-from ldap3.protocol.schemas.edir888 import edir_8_8_8_schema
+from ldap3.protocol.schemas.edir914 import edir_9_1_4_schema, edir_9_1_4_dsa_info
from ldap3.protocol.rfc4512 import SchemaInfo, DsaInfo
from ldap3.core.exceptions import LDAPAttributeError, LDAPObjectClassError
-from test.config import test_auto_escape, test_auto_encode, test_check_names
+from test.config import test_auto_escape, test_auto_encode, test_validator, test_check_names
class Test(unittest.TestCase):
def test_parse_search_filter_equality(self):
- f = parse_filter('(cn=admin)', None, test_auto_escape, test_auto_encode, test_check_names)
+ f = parse_filter('(cn=admin)', None, test_auto_escape, test_auto_encode, test_validator, test_check_names)
self.assertEqual(f.elements[0].tag, MATCH_EQUAL)
self.assertEqual(f.elements[0].assertion['attr'], 'cn')
self.assertEqual(f.elements[0].assertion['value'], b'admin')
def test_parse_search_filter_equality_2(self):
- f = parse_filter('(cn=a<=b=>c)', None, test_auto_escape, test_auto_encode, test_check_names)
+ f = parse_filter('(cn=a<=b=>c)', None, test_auto_escape, test_auto_encode, test_validator, test_check_names)
self.assertEqual(f.elements[0].tag, MATCH_EQUAL)
self.assertEqual(f.elements[0].assertion['attr'], 'cn')
self.assertEqual(f.elements[0].assertion['value'], b'a<=b=>c')
def test_parse_search_filter_extensible_syntax_1(self):
- f = parse_filter('(cn:caseExactMatch:=Fred Flintstone)', None, test_auto_escape, test_auto_encode, test_check_names)
+ f = parse_filter('(cn:caseExactMatch:=Fred Flintstone)', None, test_auto_escape, test_auto_encode, test_validator, test_check_names)
self.assertEqual(f.elements[0].tag, MATCH_EXTENSIBLE)
self.assertEqual(f.elements[0].assertion['attr'], 'cn')
self.assertEqual(f.elements[0].assertion['value'], b'Fred Flintstone')
@@ -55,7 +55,7 @@ class Test(unittest.TestCase):
self.assertEqual(f.elements[0].assertion['dnAttributes'], False)
def test_parse_search_filter_extensible_syntax_2(self):
- f = parse_filter('(cn:=Betty Rubble)', None, test_auto_escape, test_auto_encode, test_check_names)
+ f = parse_filter('(cn:=Betty Rubble)', None, test_auto_escape, test_auto_encode, test_validator, test_check_names)
self.assertEqual(f.elements[0].tag, MATCH_EXTENSIBLE)
self.assertEqual(f.elements[0].assertion['attr'], 'cn')
self.assertEqual(f.elements[0].assertion['value'], b'Betty Rubble')
@@ -63,7 +63,7 @@ class Test(unittest.TestCase):
self.assertEqual(f.elements[0].assertion['dnAttributes'], False)
def test_parse_search_filter_extensible_syntax_3(self):
- f = parse_filter('(sn:dn:2.4.6.8.10:=Barney Rubble)', None, test_auto_escape, test_auto_encode, test_check_names)
+ f = parse_filter('(sn:dn:2.4.6.8.10:=Barney Rubble)', None, test_auto_escape, test_auto_encode, test_validator, test_check_names)
self.assertEqual(f.elements[0].tag, MATCH_EXTENSIBLE)
self.assertEqual(f.elements[0].assertion['attr'], 'sn')
self.assertEqual(f.elements[0].assertion['value'], b'Barney Rubble')
@@ -71,7 +71,7 @@ class Test(unittest.TestCase):
self.assertEqual(f.elements[0].assertion['dnAttributes'], True)
def test_parse_search_filter_extensible_syntax_4(self):
- f = parse_filter('(o:dn:=Ace Industry)', None, test_auto_escape, test_auto_encode, test_check_names)
+ f = parse_filter('(o:dn:=Ace Industry)', None, test_auto_escape, test_auto_encode, test_validator, test_check_names)
self.assertEqual(f.elements[0].tag, MATCH_EXTENSIBLE)
self.assertEqual(f.elements[0].assertion['attr'], 'o')
self.assertEqual(f.elements[0].assertion['value'], b'Ace Industry')
@@ -79,7 +79,7 @@ class Test(unittest.TestCase):
self.assertEqual(f.elements[0].assertion['dnAttributes'], True)
def test_parse_search_filter_extensible_syntax_5(self):
- f = parse_filter('(:1.2.3:=Wilma Flintstone)', None, test_auto_escape, test_auto_encode, test_check_names)
+ f = parse_filter('(:1.2.3:=Wilma Flintstone)', None, test_auto_escape, test_auto_encode, test_validator, test_check_names)
self.assertEqual(f.elements[0].tag, MATCH_EXTENSIBLE)
self.assertEqual(f.elements[0].assertion['attr'], False)
self.assertEqual(f.elements[0].assertion['value'], b'Wilma Flintstone')
@@ -87,7 +87,7 @@ class Test(unittest.TestCase):
self.assertEqual(f.elements[0].assertion['dnAttributes'], False)
def test_parse_search_filter_extensible_syntax_6(self):
- f = parse_filter('(:DN:2.4.6.8.10:=Dino)', None, test_auto_escape, test_auto_encode, test_check_names)
+ f = parse_filter('(:DN:2.4.6.8.10:=Dino)', None, test_auto_escape, test_auto_encode, test_validator, test_check_names)
self.assertEqual(f.elements[0].tag, MATCH_EXTENSIBLE)
self.assertEqual(f.elements[0].assertion['attr'], False)
self.assertEqual(f.elements[0].assertion['value'], b'Dino')
@@ -95,25 +95,25 @@ class Test(unittest.TestCase):
self.assertEqual(f.elements[0].assertion['dnAttributes'], True)
def test_parse_search_filter_parenteses(self):
- f = parse_filter('(cn=' + escape_filter_chars('Doe (Missing Inc)') + ')', None, test_auto_escape, test_auto_encode, test_check_names)
+ f = parse_filter('(cn=' + escape_filter_chars('Doe (Missing Inc)') + ')', None, test_auto_escape, test_auto_encode, test_validator, test_check_names)
self.assertEqual(f.elements[0].tag, MATCH_EQUAL)
self.assertEqual(f.elements[0].assertion['attr'], 'cn')
self.assertEqual(f.elements[0].assertion['value'], b'Doe \\28Missing Inc\\29')
def test_parse_search_filter_bad_attribute_type_check_true(self):
- self.assertRaises(LDAPAttributeError, parse_filter, '(bad=admin)', SchemaInfo.from_json(edir_8_8_8_schema), test_auto_escape, test_auto_encode, check_names=True)
+ self.assertRaises(LDAPAttributeError, parse_filter, '(bad=admin)', SchemaInfo.from_json(edir_9_1_4_schema), test_auto_escape, test_auto_encode, test_validator, check_names=True)
def test_parse_search_filter_bad_attribute_type_check_false(self):
- f = parse_filter('(bad=admin)', SchemaInfo.from_json(edir_8_8_8_schema), test_auto_escape, test_auto_encode, check_names=False)
+ f = parse_filter('(bad=admin)', SchemaInfo.from_json(edir_9_1_4_schema), test_auto_escape, test_auto_encode, test_validator, check_names=False)
self.assertEqual(f.elements[0].tag, MATCH_EQUAL)
self.assertEqual(f.elements[0].assertion['attr'], 'bad')
self.assertEqual(f.elements[0].assertion['value'], b'admin')
def test_parse_search_filter_bad_object_class_type_check_true(self):
- self.assertRaises(LDAPObjectClassError, parse_filter, '(objectClass=bad)', SchemaInfo.from_json(edir_8_8_8_schema), test_auto_escape, test_auto_encode, check_names=True)
+ self.assertRaises(LDAPObjectClassError, parse_filter, '(objectClass=bad)', SchemaInfo.from_json(edir_9_1_4_schema), test_auto_escape, test_auto_encode, test_validator, check_names=True)
def test_parse_search_filter_bad_object_class_type_check_false(self):
- f = parse_filter('(objectClass=bad)', SchemaInfo.from_json(edir_8_8_8_schema), test_auto_escape, test_auto_encode, check_names=False)
+ f = parse_filter('(objectClass=bad)', SchemaInfo.from_json(edir_9_1_4_schema), test_auto_escape, test_auto_encode, test_validator, check_names=False)
self.assertEqual(f.elements[0].tag, MATCH_EQUAL)
self.assertEqual(f.elements[0].assertion['attr'], 'objectClass')
self.assertEqual(f.elements[0].assertion['value'], b'bad')
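
The filter-parsing tests now pass the new validator argument positionally between auto_encode and check_names. A sketch of the equivalent call with keyword arguments, using an arbitrary filter and no schema:

    from ldap3.operation.search import parse_filter

    f = parse_filter('(cn=admin)',        # filter string
                     None,                # schema: None disables name checking
                     auto_escape=True,
                     auto_encode=True,
                     validator=None,      # argument added in this release
                     check_names=False)
    print(f.elements[0].assertion)        # includes 'attr': 'cn' and 'value': b'admin'
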
diff --git a/test/testRebindOperation.py b/test/testRebindOperation.py
index f46d119..727c813 100644
--- a/test/testRebindOperation.py
+++ b/test/testRebindOperation.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testRemoveMembersFromGroups.py b/test/testRemoveMembersFromGroups.py
index 25ebb67..b28358e 100644
--- a/test/testRemoveMembersFromGroups.py
+++ b/test/testRemoveMembersFromGroups.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testRestartable.py b/test/testRestartable.py
index 562e95a..3dc46fb 100644
--- a/test/testRestartable.py
+++ b/test/testRestartable.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2014 - 2018 Giovanni Cannata
+# Copyright 2014 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -25,7 +25,7 @@
import unittest
-from test.config import test_server, test_user, test_password, test_lazy_connection, test_get_info, test_server_mode, test_base, test_strategy
+from test.config import test_server, test_user, test_password, test_lazy_connection, test_get_info, test_server_mode, test_base, test_strategy, test_server_type
from ldap3 import Server, Connection, ServerPool, RESTARTABLE, ROUND_ROBIN, BASE, MOCK_SYNC, MOCK_ASYNC
@@ -49,21 +49,22 @@ class Test(unittest.TestCase):
self.assertEqual(len(search_results), 1)
def test_restartable_invalid_server2(self):
- if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
- if isinstance(test_server, (list, tuple)):
- hosts = ['a.b.c.d'] + list(test_server)
- else:
- hosts = ['a.b.c.d', test_server]
- search_results = []
- servers = [Server(host=host, port=389, use_ssl=False) for host in hosts]
- server_pool = ServerPool(servers, ROUND_ROBIN, active=True, exhaust=True)
- connection = Connection(server_pool, user=test_user, password=test_password, client_strategy=RESTARTABLE, lazy=False)
- connection.open()
- connection.bind()
- connection.search(search_base=test_base, search_filter='(' + test_base.split(',')[0] + ')', search_scope=BASE)
- if connection.response:
- for resp in connection.response:
- if resp['type'] == 'searchResEntry':
- search_results.append(resp['dn'])
- connection.unbind()
- self.assertEqual(len(search_results), 1)
+ if test_server_type != 'AD':
+ if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
+ if isinstance(test_server, (list, tuple)):
+ hosts = ['a.b.c.d'] + list(test_server)
+ else:
+ hosts = ['a.b.c.d', test_server]
+ search_results = []
+ servers = [Server(host=host, port=389, use_ssl=False) for host in hosts]
+ server_pool = ServerPool(servers, ROUND_ROBIN, active=True, exhaust=True)
+ connection = Connection(server_pool, user=test_user, password=test_password, client_strategy=RESTARTABLE, lazy=False)
+ connection.open()
+ connection.bind()
+ connection.search(search_base=test_base, search_filter='(' + test_base.split(',')[0] + ')', search_scope=BASE)
+ if connection.response:
+ for resp in connection.response:
+ if resp['type'] == 'searchResEntry':
+ search_results.append(resp['dn'])
+ connection.unbind()
+ self.assertEqual(len(search_results), 1)
diff --git a/test/testSaslPrep.py b/test/testSaslPrep.py
index be406d2..f011b8c 100644
--- a/test/testSaslPrep.py
+++ b/test/testSaslPrep.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testSchema.py b/test/testSchema.py
index 31e8cee..3977035 100644
--- a/test/testSchema.py
+++ b/test/testSchema.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testSearchAndModifyEntries.py b/test/testSearchAndModifyEntries.py
index 08ae0af..1fe73cb 100644
--- a/test/testSearchAndModifyEntries.py
+++ b/test/testSearchAndModifyEntries.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testSearchOperation.py b/test/testSearchOperation.py
index 62b13bf..9804a76 100644
--- a/test/testSearchOperation.py
+++ b/test/testSearchOperation.py
@@ -6,7 +6,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
@@ -26,7 +26,7 @@
import unittest
-from ldap3.utils.conv import escape_bytes, escape_filter_chars
+from ldap3.utils.conv import escape_bytes, escape_filter_chars, ldap_escape_to_bytes
from test.config import test_base, test_name_attr, random_id, get_connection, \
add_user, drop_connection, test_server_type, test_int_attr
from ldap3 import SUBTREE
@@ -313,3 +313,54 @@ class Test(unittest.TestCase):
self.assertEqual(response[0]['attributes'][test_name_attr], testcase_id + 'sea-15')
else:
self.assertEqual(response[0]['attributes'][test_name_attr][0], testcase_id + 'sea-15')
+
+ def test_search_string_guid(self):
+ self.delete_at_teardown.append(add_user(self.connection, testcase_id, 'sea-16', attributes={'givenName': testcase_id + 'givenname-16'}))
+ if test_server_type == 'EDIR':
+ result = self.connection.search(search_base=test_base, search_filter='(givenname=' + testcase_id + 'givenname-16)', attributes=[test_name_attr, 'sn', 'guid'])
+ elif test_server_type == 'AD': # not tested on AD yet
+ result = self.connection.search(search_base=test_base, search_filter='(givenname=' + testcase_id + 'givenname-16)', attributes=[test_name_attr, 'sn', 'objectGuid'])
+ else: # not tested on other kind of servers
+ return
+ if not self.connection.strategy.sync:
+ response, result = self.connection.get_response(result)
+ else:
+ response = self.connection.response
+ result = self.connection.result
+ self.assertEqual(result['description'], 'success')
+ self.assertEqual(len(response), 1)
+ if test_server_type == 'EDIR':
+ result = self.connection.search(search_base=test_base, search_filter='(guid=' + response[0]['attributes']['guid'] + ')', attributes=[test_name_attr, 'sn'])
+ elif test_server_type == 'AD': # not tested on AD yet
+ result = self.connection.search(search_base=test_base, search_filter='(objectguid=' + response[0]['attributes']['objectguid'] + ')', attributes=[test_name_attr, 'sn'])
+ if not self.connection.strategy.sync:
+ response, result = self.connection.get_response(result)
+ else:
+ response = self.connection.response
+ result = self.connection.result
+ self.assertEqual(result['description'], 'success')
+ self.assertEqual(len(response), 1)
+ if test_server_type == 'EDIR':
+ self.assertEqual(response[0]['attributes'][test_name_attr][0], testcase_id + 'sea-16')
+ elif test_server_type == 'AD':
+ self.assertEqual(response[0]['attributes'][test_name_attr], testcase_id + 'sea-16')
+
+ def test_search_string_guid_with_backslash(self):
+ ldap_escaped = '\\7e\\18\\2a\\9c\\60\\5c\\61\\43\\af\\f5\\89\\f5\\e6\\d8\\45\\6d'
+ ldap_bytes = ldap_escape_to_bytes(ldap_escaped)
+ self.delete_at_teardown.append(add_user(self.connection, testcase_id, 'sea-17', attributes={'givenName': testcase_id + 'givenname-17', 'audio': ldap_bytes}))
+ if test_server_type == 'EDIR':
+ result = self.connection.search(search_base=test_base, search_filter='(audio=%s)' % ldap_escaped, attributes=[test_name_attr, 'sn', 'givenname', 'guid', 'audio'])
+ else: # not tested on other kind of servers
+ return
+ if not self.connection.strategy.sync:
+ response, result = self.connection.get_response(result)
+ else:
+ response = self.connection.response
+ result = self.connection.result
+ self.assertEqual(result['description'], 'success')
+ self.assertEqual(len(response), 1)
+ if test_server_type == 'EDIR':
+ self.assertEqual(response[0]['attributes'][test_name_attr][0], testcase_id + 'sea-17')
+ self.assertEqual(response[0]['attributes']['audio'][0], ldap_bytes)
+
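test_search_string_guid_with_backslash relies on ldap_escape_to_bytes, newly imported above, to turn an LDAP-escaped hex string into the raw bytes stored in the entry. A minimal sketch using the same value as the test:

    from ldap3.utils.conv import ldap_escape_to_bytes

    ldap_escaped = '\\7e\\18\\2a\\9c\\60\\5c\\61\\43\\af\\f5\\89\\f5\\e6\\d8\\45\\6d'
    raw = ldap_escape_to_bytes(ldap_escaped)
    print(raw)   # b'~\x18*\x9c`\\aC\xaf\xf5\x89\xf5\xe6\xd8Em'
    # the escaped form can be used verbatim in a search filter, while the raw bytes
    # are what is written to (and read back from) the attribute
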
diff --git a/test/testSearchOperationEntries.py b/test/testSearchOperationEntries.py
index 274c4a4..f77a6fd 100644
--- a/test/testSearchOperationEntries.py
+++ b/test/testSearchOperationEntries.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2015 - 2018 Giovanni Cannata
+# Copyright 2015 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testSearchOperationJSON.py b/test/testSearchOperationJSON.py
index d11d17d..dc25f44 100644
--- a/test/testSearchOperationJSON.py
+++ b/test/testSearchOperationJSON.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2013 - 2018 Giovanni Cannata
+# Copyright 2013 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testTls.py b/test/testTls.py
index e9ce8ae..891a3e6 100644
--- a/test/testTls.py
+++ b/test/testTls.py
@@ -1,189 +1,190 @@
-"""
-"""
-
-# Created on 2013.08.11
-#
-# Author: Giovanni Cannata
-#
-# Copyright 2013 - 2018 Giovanni Cannata
-#
-# This file is part of ldap3.
-#
-# ldap3 is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License as published
-# by the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# ldap3 is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with ldap3 in the COPYING and COPYING.LESSER files.
-# If not, see <http://www.gnu.org/licenses/>.
-
-import unittest
-import ssl
-
-from ldap3 import Server, Connection, ServerPool, Tls, SASL, EXTERNAL, MOCK_ASYNC, MOCK_SYNC
-from test.config import test_server, test_port, test_port_ssl, test_user, test_password, test_authentication, \
- test_strategy, test_lazy_connection, test_get_info, test_server_mode, test_valid_names, \
- test_pooling_strategy, test_pooling_active, test_pooling_exhaust, test_ca_cert_file, \
- test_user_cert_file, test_user_key_file
-
-
-class Test(unittest.TestCase):
- def test_start_tls(self):
- if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
- if isinstance(test_server, (list, tuple)):
- server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
- for host in test_server:
- server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
- else:
- server = Server(host=test_server, port=test_port, tls=Tls(validate=ssl.CERT_NONE), get_info=test_get_info, mode=test_server_mode)
- connection = Connection(server, auto_bind=False, version=3, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication, lazy=test_lazy_connection, pool_name='pool1')
- connection.open()
- connection.start_tls()
- self.assertFalse(connection.closed)
- connection.unbind()
- if connection.strategy.pooled:
- connection.strategy.terminate()
-
- def test_open_ssl_with_defaults(self):
- if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
- if isinstance(test_server, (list, tuple)):
- server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
- for host in test_server:
- server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
- else:
- server = Server(host=test_server, port=test_port_ssl, use_ssl=True)
- connection = Connection(server, user=test_user, password=test_password)
- connection.open()
- self.assertFalse(connection.closed)
- connection.unbind()
- if connection.strategy.pooled:
- connection.strategy.terminate()
-
- def test_open_with_tls_before_bind(self):
- if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
- if isinstance(test_server, (list, tuple)):
- server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
- for host in test_server:
- server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
- else:
- server = Server(host=test_server, port=test_port, tls=Tls())
- connection = Connection(server, auto_bind=False, version=3, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication, lazy=test_lazy_connection, pool_name='pool1')
- connection.open(read_server_info=False)
- connection.start_tls(read_server_info=False)
- connection.bind()
- self.assertTrue(connection.bound)
- connection.unbind()
- if connection.strategy.pooled:
- connection.strategy.terminate()
- self.assertFalse(connection.bound)
-
- def test_open_with_tls_after_bind(self):
- if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
- if isinstance(test_server, (list, tuple)):
- server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
- for host in test_server:
- server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
- else:
- server = Server(host=test_server, port=test_port, tls=Tls())
- connection = Connection(server, auto_bind=False, version=3, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication, lazy=test_lazy_connection, pool_name='pool1')
- connection.open()
- connection.bind()
- connection.start_tls()
- self.assertTrue(connection.bound)
- connection.unbind()
- if connection.strategy.pooled:
- connection.strategy.terminate()
- self.assertFalse(connection.bound)
-
- # def test_bind_ssl_with_certificate(self):
- # if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
- # tls = Tls(local_private_key_file=test_user_key_file,
- # local_certificate_file=test_user_cert_file,
- # validate=ssl.CERT_REQUIRED,
- # version=ssl.PROTOCOL_TLSv1,
- # ca_certs_file=test_ca_cert_file,
- # valid_names=test_valid_names)
- # if isinstance(test_server, (list, tuple)):
- # server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
- # for host in test_server:
- # server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
- # else:
- # server = Server(host=test_server, port=test_port_ssl, use_ssl=True, tls=tls)
- # connection = Connection(server, auto_bind=False, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication)
- # connection.open()
- # connection.bind()
- # self.assertTrue(connection.bound)
- # connection.unbind()
- # if connection.strategy.pooled:
- # connection.strategy.terminate()
- # self.assertFalse(connection.bound)
-
- # def test_sasl_with_external_certificate(self):
- # if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
- # tls = Tls(local_private_key_file=test_user_key_file, local_certificate_file=test_user_cert_file, validate=ssl.CERT_REQUIRED, version=ssl.PROTOCOL_TLSv1, ca_certs_file=test_ca_cert_file, valid_names=test_valid_names)
- # if isinstance(test_server, (list, tuple)):
- # server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
- # for host in test_server:
- # server.add(Server(host=host, port=test_port_ssl, use_ssl=True, tls=tls, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
- # else:
- # server = Server(host=test_server, port=test_port_ssl, use_ssl=True, tls=tls)
- # connection = Connection(server, auto_bind=False, version=3, client_strategy=test_strategy, authentication=SASL, sasl_mechanism=EXTERNAL)
- # connection.open()
- # connection.bind()
- # self.assertTrue(connection.bound)
- # connection.unbind()
- # if connection.strategy.pooled:
- # connection.strategy.terminate()
- # self.assertFalse(connection.bound)
-
- def test_bind_ssl_cert_none(self):
- if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
- tls = Tls(validate=ssl.CERT_NONE)
- if isinstance(test_server, (list, tuple)):
- server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
- for host in test_server:
- server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
- else:
- server = Server(host=test_server, port=test_port_ssl, use_ssl=True, tls=tls)
- connection = Connection(server, auto_bind=False, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication)
- connection.open()
- connection.bind()
- self.assertTrue(connection.bound)
- connection.unbind()
- if connection.strategy.pooled:
- connection.strategy.terminate()
- self.assertFalse(connection.bound)
-
- def test_start_tls_with_cipher(self):
- # ciphers = None
- # ciphers = '!aNULL:!eNULL:!LOW:!EXPORT:!SSLv2'
- ciphers = 'HIGH:!aNULL:!RC4:!DSS'
-
- if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
- if isinstance(test_server, (list, tuple)):
- server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
- for host in test_server:
- server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
- else:
- server = Server(host=test_server, port=test_port, tls=Tls(validate=ssl.CERT_NONE, ciphers=ciphers), get_info=test_get_info, mode=test_server_mode)
- connection = Connection(server, auto_bind=False, version=3, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication, lazy=test_lazy_connection, pool_name='pool1')
- connection.open()
- connection.start_tls()
- self.assertFalse(connection.closed)
- # self.assertEqual(connection.socket.cipher(), ciphers)
- connection.unbind()
- if connection.strategy.pooled:
- connection.strategy.terminate()
-
- # def test_hostname_doesnt_match(self):
- # tls_config = Tls(validate=ssl.CERT_REQUIRED, version=ssl.PROTOCOL_TLSv1)
- # server = Server('edir1.hyperv', use_ssl=True, tls=tls_config)
- # conn = Connection(server)
- # conn.open()
- # self.assertTrue(conn.bound)
+"""
+"""
+
+# Created on 2013.08.11
+#
+# Author: Giovanni Cannata
+#
+# Copyright 2013 - 2020 Giovanni Cannata
+#
+# This file is part of ldap3.
+#
+# ldap3 is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License as published
+# by the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# ldap3 is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with ldap3 in the COPYING and COPYING.LESSER files.
+# If not, see <http://www.gnu.org/licenses/>.
+
+import unittest
+import ssl
+
+from ldap3 import Server, Connection, ServerPool, Tls, SASL, EXTERNAL, MOCK_ASYNC, MOCK_SYNC
+from test.config import test_server, test_port, test_port_ssl, test_user, test_password, test_authentication, \
+ test_strategy, test_lazy_connection, test_get_info, test_server_mode, test_valid_names, \
+ test_pooling_strategy, test_pooling_active, test_pooling_exhaust, test_ca_cert_file, \
+ test_user_cert_file, test_user_key_file
+
+
+class Test(unittest.TestCase):
+ def test_start_tls(self):
+ if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
+ if isinstance(test_server, (list, tuple)):
+ server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
+ for host in test_server:
+ server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
+ else:
+ server = Server(host=test_server, port=test_port, tls=Tls(validate=ssl.CERT_NONE), get_info=test_get_info, mode=test_server_mode)
+ connection = Connection(server, auto_bind=False, version=3, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication, lazy=test_lazy_connection, pool_name='pool1')
+ connection.open()
+ connection.start_tls()
+ self.assertFalse(connection.closed)
+ connection.unbind()
+ if connection.strategy.pooled:
+ connection.strategy.terminate()
+
+ def test_open_ssl_with_defaults(self):
+ if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
+ if isinstance(test_server, (list, tuple)):
+ server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
+ for host in test_server:
+ server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
+ else:
+ server = Server(host=test_server, port=test_port_ssl, use_ssl=True)
+ connection = Connection(server, user=test_user, password=test_password)
+ connection.open()
+ self.assertFalse(connection.closed)
+ connection.unbind()
+ if connection.strategy.pooled:
+ connection.strategy.terminate()
+
+ def test_open_with_tls_before_bind(self):
+ if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
+ if isinstance(test_server, (list, tuple)):
+ server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
+ for host in test_server:
+ server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
+ else:
+ server = Server(host=test_server, port=test_port, tls=Tls())
+ connection = Connection(server, auto_bind=False, version=3, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication, lazy=test_lazy_connection, pool_name='pool1')
+ connection.open(read_server_info=False)
+ connection.start_tls(read_server_info=False)
+ connection.bind()
+ self.assertTrue(connection.bound)
+ connection.unbind()
+ if connection.strategy.pooled:
+ connection.strategy.terminate()
+ self.assertFalse(connection.bound)
+
+ def test_open_with_tls_after_bind(self):
+ if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
+ if isinstance(test_server, (list, tuple)):
+ server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
+ for host in test_server:
+ server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
+ else:
+ server = Server(host=test_server, port=test_port, tls=Tls())
+ connection = Connection(server, auto_bind=False, version=3, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication, lazy=test_lazy_connection, pool_name='pool1')
+ connection.open()
+ connection.bind()
+ connection.start_tls()
+ self.assertTrue(connection.bound)
+ connection.unbind()
+ if connection.strategy.pooled:
+ connection.strategy.terminate()
+ self.assertFalse(connection.bound)
+
+ # def test_bind_ssl_with_certificate(self):
+ # if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
+ # tls = Tls(local_private_key_file=test_user_key_file,
+ # local_certificate_file=test_user_cert_file,
+ # validate=ssl.CERT_REQUIRED,
+ # version=ssl.PROTOCOL_SSLv23,
+ # ssl_options=[ssl.OP_NO_SSLv2, ssl.OP_NO_SSLv3],
+ # ca_certs_file=test_ca_cert_file,
+ # valid_names=test_valid_names)
+ # if isinstance(test_server, (list, tuple)):
+ # server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
+ # for host in test_server:
+ # server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
+ # else:
+ # server = Server(host=test_server, port=test_port_ssl, use_ssl=True, tls=tls)
+ # connection = Connection(server, auto_bind=False, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication)
+ # connection.open()
+ # connection.bind()
+ # self.assertTrue(connection.bound)
+ # connection.unbind()
+ # if connection.strategy.pooled:
+ # connection.strategy.terminate()
+ # self.assertFalse(connection.bound)
+
+ # def test_sasl_with_external_certificate(self):
+ # if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
+ # tls = Tls(local_private_key_file=test_user_key_file, local_certificate_file=test_user_cert_file, validate=ssl.CERT_REQUIRED, version=ssl.PROTOCOL_TLSv1, ca_certs_file=test_ca_cert_file, valid_names=test_valid_names)
+ # if isinstance(test_server, (list, tuple)):
+ # server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
+ # for host in test_server:
+ # server.add(Server(host=host, port=test_port_ssl, use_ssl=True, tls=tls, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
+ # else:
+ # server = Server(host=test_server, port=test_port_ssl, use_ssl=True, tls=tls)
+ # connection = Connection(server, auto_bind=False, version=3, client_strategy=test_strategy, authentication=SASL, sasl_mechanism=EXTERNAL)
+ # connection.open()
+ # connection.bind()
+ # self.assertTrue(connection.bound)
+ # connection.unbind()
+ # if connection.strategy.pooled:
+ # connection.strategy.terminate()
+ # self.assertFalse(connection.bound)
+
+ def test_bind_ssl_cert_none(self):
+ if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
+ tls = Tls(validate=ssl.CERT_NONE)
+ if isinstance(test_server, (list, tuple)):
+ server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
+ for host in test_server:
+ server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
+ else:
+ server = Server(host=test_server, port=test_port_ssl, use_ssl=True, tls=tls)
+ connection = Connection(server, auto_bind=False, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication)
+ connection.open()
+ connection.bind()
+ self.assertTrue(connection.bound)
+ connection.unbind()
+ if connection.strategy.pooled:
+ connection.strategy.terminate()
+ self.assertFalse(connection.bound)
+
+ def test_start_tls_with_cipher(self):
+ # ciphers = None
+ # ciphers = '!aNULL:!eNULL:!LOW:!EXPORT:!SSLv2'
+ ciphers = 'HIGH:!aNULL:!RC4:!DSS'
+
+ if test_strategy not in [MOCK_SYNC, MOCK_ASYNC]:
+ if isinstance(test_server, (list, tuple)):
+ server = ServerPool(pool_strategy=test_pooling_strategy, active=test_pooling_active, exhaust=test_pooling_exhaust)
+ for host in test_server:
+ server.add(Server(host=host, port=test_port, allowed_referral_hosts=('*', True), get_info=test_get_info, mode=test_server_mode))
+ else:
+ server = Server(host=test_server, port=test_port, tls=Tls(validate=ssl.CERT_NONE, ciphers=ciphers), get_info=test_get_info, mode=test_server_mode)
+ connection = Connection(server, auto_bind=False, version=3, client_strategy=test_strategy, user=test_user, password=test_password, authentication=test_authentication, lazy=test_lazy_connection, pool_name='pool1')
+ connection.open()
+ connection.start_tls()
+ self.assertFalse(connection.closed)
+ # self.assertEqual(connection.socket.cipher(), ciphers)
+ connection.unbind()
+ if connection.strategy.pooled:
+ connection.strategy.terminate()
+
+ # def test_hostname_doesnt_match(self):
+ # tls_config = Tls(validate=ssl.CERT_REQUIRED, version=ssl.PROTOCOL_TLSv1)
+ # server = Server('edir1.hyperv', use_ssl=True, tls=tls_config)
+ # conn = Connection(server)
+ # conn.open()
+ # self.assertTrue(conn.bound)
diff --git a/test/testTransactions.py b/test/testTransactions.py
index 2765c24..0f7917d 100644
--- a/test/testTransactions.py
+++ b/test/testTransactions.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#
diff --git a/test/testValidators.py b/test/testValidators.py
index fd585f3..1f8dccc 100644
--- a/test/testValidators.py
+++ b/test/testValidators.py
@@ -1,162 +1,204 @@
-import unittest
-from datetime import datetime
-
-from ldap3.protocol.formatters.validators import validate_integer, validate_boolean, validate_bytes, validate_generic_single_value, validate_time
-from ldap3.core.timezone import OffsetTzInfo
-
-class Test(unittest.TestCase):
- def test_int_validator_valid_number(self):
- validated = validate_integer(1)
- self.assertTrue(validated)
-
- def test_int_validator_invalid_number(self):
- validated = validate_integer(1.2)
- self.assertFalse(validated)
-
- def test_int_validator_valid_number_sequence(self):
- validated = validate_integer([1, 2, 3])
- self.assertTrue(validated)
-
- def test_int_validator_invalid_number_sequence(self):
- validated = validate_integer([1, 1.2, 3])
- self.assertFalse(validated)
-
- def test_int_validator_valid_string_number(self):
- validated = validate_integer('1')
- self.assertEqual(validated, 1)
-
- def test_int_validator_invalid_string_number(self):
- validated = validate_integer('1.2')
- self.assertFalse(validated)
-
- def test_int_validator_valid_string_number_sequence(self):
- validated = validate_integer(['1', '2', '3'])
- self.assertEqual(validated, [1, 2, 3])
-
- def test_int_validator_invalid_string_number_sequence(self):
- validated = validate_integer(['1', '1.2', '3'])
- self.assertFalse(validated)
-
- def test_int_validator_invalid_type_1(self):
- validated = validate_integer(True)
- self.assertFalse(validated)
-
- def test_int_validator_invalid_type_2(self):
- validated = validate_integer(False)
- self.assertFalse(validated)
-
- def test_int_validator_invalid_type_3(self):
- validated = validate_integer(Ellipsis)
- self.assertFalse(validated)
-
- def test_int_validator_invalid_type_4(self):
- validated = validate_integer(object)
- self.assertFalse(validated)
-
- def test_boolean_validator_valid_bool_true(self):
- validated = validate_boolean(True)
- self.assertEqual(validated, 'TRUE')
-
- def test_boolean_validator_valid_bool_false(self):
- validated = validate_boolean(False)
- self.assertEqual(validated, 'FALSE')
-
- def test_boolean_validator_valid_str_true_1(self):
- validated = validate_boolean('True')
- self.assertEqual(validated, 'TRUE')
-
- def test_boolean_validator_valid_str_false_1(self):
- validated = validate_boolean('False')
- self.assertEqual(validated, 'FALSE')
-
- def test_boolean_validator_valid_str_true_2(self):
- validated = validate_boolean('TrUe')
- self.assertEqual(validated, 'TRUE')
-
- def test_boolean_validator_valid_str_false_2(self):
- validated = validate_boolean('FaLsE')
- self.assertEqual(validated, 'FALSE')
-
- def test_boolean_validator_invalid_int_1(self):
- validated = validate_boolean(0)
- self.assertFalse(validated)
-
- def test_boolean_validator_invalid_int_2(self):
- validated = validate_boolean(1)
- self.assertFalse(validated)
-
- def test_boolean_validator_invalid_str_1(self):
- validated = validate_boolean('')
- self.assertFalse(validated)
-
- def test_boolean_validator_invalid_str_2(self):
- validated = validate_boolean('abc')
- self.assertFalse(validated)
-
- def test_bytes_validator_valid_bytes(self):
- validated = validate_bytes(bytes([1, 2, 3]))
- self.assertTrue(validated)
-
- def test_bytes_validator_invalid_str(self):
- if str is bytes: # Python 2
- validated = validate_bytes(unicode('abc'))
- else:
- validated = validate_bytes('abc')
-
- self.assertFalse(validated)
-
- def test_bytes_validator_invalid_object(self):
- validated = validate_bytes(object)
- self.assertFalse(validated)
-
- def test_validate_generic_single_value_valid_1(self):
- validated = validate_generic_single_value(1)
- self.assertTrue(validated)
-
- def test_validate_generic_single_value_valid_2(self):
- validated = validate_generic_single_value('abc')
- self.assertTrue(validated)
-
- def test_validate_generic_single_value_valid_3(self):
- validated = validate_generic_single_value(object)
- self.assertTrue(validated)
-
- def test_validate_generic_single_value_invalid_1(self):
- validated = validate_generic_single_value((1, 2))
- self.assertFalse(validated)
-
- def test_validate_generic_single_value_invalid_2(self):
- validated = validate_generic_single_value([1, 2])
- self.assertFalse(validated)
-
- def test_validate_generic_single_value_invalid_3(self):
- validated = validate_generic_single_value((a for a in range(2)))
- self.assertFalse(validated)
-
- def test_validate_time_valid_datetime(self):
- validated = validate_time(datetime.now())
- self.assertTrue(validated)
-
- def test_validate_time_valid_datetime_with_timezone(self):
- validated = validate_time(datetime.now(OffsetTzInfo(0, 'UTC')))
- self.assertTrue(validated)
-
- def test_validate_time_valid_str(self):
- validated = validate_time('20170317094232Z')
- self.assertTrue(validated)
-
- def test_validate_time_valid_str_with_timezone(self):
- validated = validate_time('20170317094232+0100')
- self.assertTrue(validated)
-
- def test_validate_time_invalid_str_1(self):
- validated = validate_time('abc')
- self.assertFalse(validated)
-
- def test_validate_time_invalid_str_2(self):
- validated = validate_time('20170317254201Z')
- self.assertFalse(validated)
-
- def test_validate_time_invalid_str_with_timezone(self):
- validated = validate_time('20170317094232+24')
- self.assertFalse(validated)
+import unittest
+from datetime import datetime
+
+from ldap3.protocol.formatters.validators import validate_integer, validate_boolean, validate_bytes, validate_generic_single_value, validate_time, validate_zero_and_minus_one_and_positive_int
+from ldap3.core.timezone import OffsetTzInfo
+
+class Test(unittest.TestCase):
+ def test_int_validator_valid_number(self):
+ validated = validate_integer(1)
+ self.assertTrue(validated)
+
+ def test_int_validator_invalid_number(self):
+ validated = validate_integer(1.2)
+ self.assertFalse(validated)
+
+ def test_int_validator_valid_number_sequence(self):
+ validated = validate_integer([1, 2, 3])
+ self.assertTrue(validated)
+
+ def test_int_validator_invalid_number_sequence(self):
+ validated = validate_integer([1, 1.2, 3])
+ self.assertFalse(validated)
+
+ def test_int_validator_valid_string_number(self):
+ validated = validate_integer('1')
+ self.assertEqual(validated, 1)
+
+ def test_int_validator_invalid_string_number(self):
+ validated = validate_integer('1.2')
+ self.assertFalse(validated)
+
+ def test_int_validator_valid_string_number_sequence(self):
+ validated = validate_integer(['1', '2', '3'])
+ self.assertEqual(validated, [1, 2, 3])
+
+ def test_int_validator_invalid_string_number_sequence(self):
+ validated = validate_integer(['1', '1.2', '3'])
+ self.assertFalse(validated)
+
+ def test_int_validator_invalid_type_1(self):
+ validated = validate_integer(True)
+ self.assertFalse(validated)
+
+ def test_int_validator_invalid_type_2(self):
+ validated = validate_integer(False)
+ self.assertFalse(validated)
+
+ def test_int_validator_invalid_type_3(self):
+ validated = validate_integer(Ellipsis)
+ self.assertFalse(validated)
+
+ def test_int_validator_invalid_type_4(self):
+ validated = validate_integer(object)
+ self.assertFalse(validated)
+
+ def test_boolean_validator_valid_bool_true(self):
+ validated = validate_boolean(True)
+ self.assertEqual(validated, 'TRUE')
+
+ def test_boolean_validator_valid_bool_false(self):
+ validated = validate_boolean(False)
+ self.assertEqual(validated, 'FALSE')
+
+ def test_boolean_validator_valid_str_true_1(self):
+ validated = validate_boolean('True')
+ self.assertEqual(validated, 'TRUE')
+
+ def test_boolean_validator_valid_str_false_1(self):
+ validated = validate_boolean('False')
+ self.assertEqual(validated, 'FALSE')
+
+ def test_boolean_validator_valid_str_true_2(self):
+ validated = validate_boolean('TrUe')
+ self.assertEqual(validated, 'TRUE')
+
+ def test_boolean_validator_valid_str_false_2(self):
+ validated = validate_boolean('FaLsE')
+ self.assertEqual(validated, 'FALSE')
+
+ def test_boolean_validator_invalid_int_1(self):
+ validated = validate_boolean(0)
+ self.assertFalse(validated)
+
+ def test_boolean_validator_invalid_int_2(self):
+ validated = validate_boolean(1)
+ self.assertFalse(validated)
+
+ def test_boolean_validator_invalid_str_1(self):
+ validated = validate_boolean('')
+ self.assertFalse(validated)
+
+ def test_boolean_validator_invalid_str_2(self):
+ validated = validate_boolean('abc')
+ self.assertFalse(validated)
+
+ def test_bytes_validator_valid_bytes(self):
+ validated = validate_bytes(bytes([1, 2, 3]))
+ self.assertTrue(validated)
+
+ def test_bytes_validator_invalid_str(self):
+ if str is bytes: # Python 2
+ validated = validate_bytes(unicode('abc'))
+ else:
+ validated = validate_bytes('abc')
+
+ self.assertFalse(validated)
+
+ def test_bytes_validator_invalid_object(self):
+ validated = validate_bytes(object)
+ self.assertFalse(validated)
+
+ def test_validate_generic_single_value_valid_1(self):
+ validated = validate_generic_single_value(1)
+ self.assertTrue(validated)
+
+ def test_validate_generic_single_value_valid_2(self):
+ validated = validate_generic_single_value('abc')
+ self.assertTrue(validated)
+
+ def test_validate_generic_single_value_valid_3(self):
+ validated = validate_generic_single_value(object)
+ self.assertTrue(validated)
+
+ def test_validate_generic_single_value_invalid_1(self):
+ validated = validate_generic_single_value((1, 2))
+ self.assertFalse(validated)
+
+ def test_validate_generic_single_value_invalid_2(self):
+ validated = validate_generic_single_value([1, 2])
+ self.assertFalse(validated)
+
+ def test_validate_generic_single_value_invalid_3(self):
+ validated = validate_generic_single_value((a for a in range(2)))
+ self.assertFalse(validated)
+
+ def test_validate_time_valid_datetime(self):
+ validated = validate_time(datetime.now())
+ self.assertTrue(validated)
+
+ def test_validate_time_valid_datetime_with_timezone(self):
+ validated = validate_time(datetime.now(OffsetTzInfo(0, 'UTC')))
+ self.assertTrue(validated)
+
+ def test_validate_time_valid_str(self):
+ validated = validate_time('20170317094232Z')
+ self.assertTrue(validated)
+
+ def test_validate_time_valid_str_with_timezone(self):
+ validated = validate_time('20170317094232+0100')
+ self.assertTrue(validated)
+
+ def test_validate_time_invalid_str_1(self):
+ validated = validate_time('abc')
+ self.assertFalse(validated)
+
+ def test_validate_time_invalid_str_2(self):
+ validated = validate_time('20170317254201Z')
+ self.assertFalse(validated)
+
+ def test_validate_time_invalid_str_with_timezone(self):
+ validated = validate_time('20170317094232+24')
+ self.assertFalse(validated)
+
+ def test_validate_minus_one_zero_greater_than_zero(self):
+ validated = validate_zero_and_minus_one_and_positive_int(0)
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int(-1)
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int(1)
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int(2)
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int(-2)
+ self.assertFalse(validated)
+ validated = validate_zero_and_minus_one_and_positive_int('0')
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int('-1')
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int('1')
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int('2')
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int('-2')
+ self.assertFalse(validated)
+ validated = validate_zero_and_minus_one_and_positive_int([0])
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int([-1])
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int([1])
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int([2])
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int([-2])
+ self.assertFalse(validated)
+ validated = validate_zero_and_minus_one_and_positive_int(['0'])
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int(['-1'])
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int(['1'])
+ self.assertTrue(validated)
+ validated = validate_zero_and_minus_one_and_positive_int(['2'])
+ self.assertTrue(validated)
+        validated = validate_zero_and_minus_one_and_positive_int(['-2'])
+        self.assertFalse(validated)
diff --git a/test/testWriterCursor.py b/test/testWriterCursor.py
index 88ae148..f577813 100644
--- a/test/testWriterCursor.py
+++ b/test/testWriterCursor.py
@@ -5,7 +5,7 @@
#
# Author: Giovanni Cannata
#
-# Copyright 2016 - 2018 Giovanni Cannata
+# Copyright 2016 - 2020 Giovanni Cannata
#
# This file is part of ldap3.
#