From 4227c0517e761323c553423410568b36d7b32a6d Mon Sep 17 00:00:00 2001
From: stroeder
Date: Wed, 22 Feb 2017 21:16:23 +0000
Subject: [PATCH] reworked _run_failure_tests()

---
 Tests/t_ldap_schema_tokenizer.py | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)

diff --git a/Tests/t_ldap_schema_tokenizer.py b/Tests/t_ldap_schema_tokenizer.py
index ebd6819..95c1f1b 100644
--- a/Tests/t_ldap_schema_tokenizer.py
+++ b/Tests/t_ldap_schema_tokenizer.py
@@ -52,10 +52,10 @@
 TESTCASES_BROKEN = (
     "( BLUB",
     "BLUB )",
-    "( BLUB )) DA (",
     "BLUB 'DA",
-    r"BLUB 'DA\'",
     "BLUB $ DA",
+    "BLUB 'DA\\'",
+    "( BLUB )) DA (",
 )
 
 class TestSplitTokens(unittest.TestCase):
@@ -70,12 +70,20 @@ def _run_split_tokens_tests(self, test_cases):
 
     def _run_failure_tests(self, test_cases):
+        should_have_failed = []
         for test_value in test_cases:
             try:
                 _ = ldap.schema.split_tokens(test_value)
             except ValueError:
                 pass
             else:
-                self.fail('%r should have raised ValueError' % (test_value))
+                should_have_failed.append(test_value)
+        if should_have_failed:
+            self.fail(
+                '%d value(s) should have raised ValueError: %r' % (
+                    len(should_have_failed),
+                    should_have_failed,
+                )
+            )
 
     def test_basic(self):
         """