diff --git a/Lib/ldap/schema/tokenizer.py b/Lib/ldap/schema/tokenizer.py
index ede7e21..20958c0 100644
--- a/Lib/ldap/schema/tokenizer.py
+++ b/Lib/ldap/schema/tokenizer.py
@@ -52,16 +52,15 @@ def extract_tokens(l,known_tokens):
   """
   assert l[0].strip()=="(" and l[-1].strip()==")",ValueError(l)
   result = {}
-  result_has_key = result.has_key
   result.update(known_tokens)
   i = 0
   l_len = len(l)
   while i<l_len:
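
Background on the change: dict.has_key() no longer exists on Python 3, so the bound-method alias result_has_key is dropped. The key tests later in the loop presumably switch to the "in" operator, which behaves the same on Python 2 and 3. A minimal, hypothetical sketch of that pattern (illustrative only; the remainder of the hunk is truncated above):

    # "token in result" performs the same dict-key lookup as the
    # Python-2-only result.has_key(token) / result_has_key(token).
    result = {}
    result.update({"NAME": 0, "DESC": 0})   # stand-in for known_tokens
    token = "NAME"
    if token in result:                     # replaces result_has_key(token)
        print("known token:", token)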