Lib/ldap/schema/test_tokenizer.py is now Tests/t_ldap_schema_tokenizer.py based on module unittest
stroeder committed on Feb 14, 2017
1 parent ba46652 · commit 8e18435
Showing 2 changed files with 36 additions and 30 deletions.
Lib/ldap/schema/test_tokenizer.py: this file was deleted.
Tests/t_ldap_schema_tokenizer.py (new file):

```python
import unittest

import ldap.schema
from ldap.schema.tokenizer import split_tokens,extract_tokens


class TestTokenize(unittest.TestCase):

    testcases_split_tokens = (
        (" BLUBBER DI BLUBB ", ["BLUBBER", "DI", "BLUBB"]),
        ("BLUBBER DI BLUBB",["BLUBBER","DI","BLUBB"]),
        ("BLUBBER DI BLUBB ",["BLUBBER","DI","BLUBB"]),
        ("BLUBBER DI 'BLUBB' ",["BLUBBER","DI","BLUBB"]),
        ("BLUBBER ( DI ) 'BLUBB' ",["BLUBBER","(","DI",")","BLUBB"]),
        ("BLUBBER(DI)",["BLUBBER","(","DI",")"]),
        ("BLUBBER ( DI)",["BLUBBER","(","DI",")"]),
        ("BLUBBER ''",["BLUBBER",""]),
        ("( BLUBBER (DI 'BLUBB'))",["(","BLUBBER","(","DI","BLUBB",")",")"]),
        ("BLUBB (DA$BLAH)",['BLUBB',"(","DA","BLAH",")"]),
        ("BLUBB ( DA $ BLAH )",['BLUBB',"(","DA","BLAH",")"]),
        ("BLUBB (DA$ BLAH)",['BLUBB',"(","DA","BLAH",")"]),
        ("BLUBB (DA $BLAH)",['BLUBB',"(","DA","BLAH",")"]),
        ("BLUBB 'DA$BLAH'",['BLUBB',"DA$BLAH"]),
        ("BLUBB DI 'BLU B B ER' DA 'BLAH' ",['BLUBB','DI','BLU B B ER','DA','BLAH']),
        ("BLUBB DI 'BLU B B ER' DA 'BLAH' LABER",['BLUBB','DI','BLU B B ER','DA','BLAH','LABER']),

        #("BLUBBER DI 'BLU'BB ER' DA 'BLAH' ", ["BLUBBER", "DI", "BLU'BB ER", "DA", "BLAH"]), # for Oracle
        #("BLUBB DI 'BLU B B ER'MUST 'BLAH' ",['BLUBB','DI','BLU B B ER','MUST','BLAH']) # for Oracle
    )

    def test_split_tokens(self):
        for t, r in self.testcases_split_tokens:
            l = ldap.schema.tokenizer.split_tokens(t, {'MUST':None})
            self.assertEqual(l, r)


if __name__ == '__main__':
    unittest.main()
```
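For readers unfamiliar with the tokenizer, the sketch below (not part of the commit) illustrates the behaviour that the test cases encode. It assumes a python-ldap checkout where `ldap.schema.tokenizer.split_tokens` accepts the same two-argument call used in `test_split_tokens` (input string plus a keyword dictionary); the expected results are copied from the test cases above.

```python
# Minimal sketch: calling split_tokens directly, assuming the two-argument
# signature used in the test module above.
from ldap.schema.tokenizer import split_tokens

# Parentheses become separate tokens and quoted values lose their quotes.
print(split_tokens("( BLUBBER (DI 'BLUBB'))", {'MUST': None}))
# expected, per the test cases: ['(', 'BLUBBER', '(', 'DI', 'BLUBB', ')', ')']

# '$' acts as a separator inside a parenthesised list.
print(split_tokens("BLUBB (DA$BLAH)", {'MUST': None}))
# expected: ['BLUBB', '(', 'DA', 'BLAH', ')']
```

Because the module ends with `unittest.main()`, the tests can be run directly (e.g. `python Tests/t_ldap_schema_tokenizer.py`) or through unittest's normal discovery mechanisms.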