Test the errors that are raised and the ones that are logged.

Signed-off-by: Mauro Carvalho Chehab <[email protected]>
---
 tools/lib/python/kdoc/c_lex.py    |  2 +-
 tools/unittests/test_cmatch.py    | 15 ++++++++++++++-
 tools/unittests/test_tokenizer.py | 11 ++++++-----
 3 files changed, 21 insertions(+), 7 deletions(-)

diff --git a/tools/lib/python/kdoc/c_lex.py b/tools/lib/python/kdoc/c_lex.py
index 596510bb4e95..8beac59166fc 100644
--- a/tools/lib/python/kdoc/c_lex.py
+++ b/tools/lib/python/kdoc/c_lex.py
@@ -194,7 +194,7 @@ class CTokenizer():
             value = match.group()
 
             if kind == CToken.MISMATCH:
-                log.error(f"Unexpected token '{value}' on {pos}:\n\t{source}")
+                log.error(f"Unexpected token '{value}' on pos {pos}:\n\t'{source}'")
             elif kind == CToken.BEGIN:
                 if value == '(':
                     paren_level += 1
diff --git a/tools/unittests/test_cmatch.py b/tools/unittests/test_cmatch.py
index f6ccd2a942f1..3fbc5d3bc244 100755
--- a/tools/unittests/test_cmatch.py
+++ b/tools/unittests/test_cmatch.py
@@ -288,6 +288,19 @@ class TestSubSimple(TestCaseDiff):
 
         self.assertLogicallyEqual(result, "int foo;")
 
+    def test_rise_early_greedy(self):
+        line = f"{self.MACRO}(a, b, c, d);"
+        sub = r"\1, \2+, \3"
+
+        with self.assertRaises(ValueError):
+            result = self.matcher.sub(sub, line)
+
+    def test_rise_multiple_greedy(self):
+        line = f"{self.MACRO}(a, b, c, d);"
+        sub = r"\1, \2+, \3+"
+
+        with self.assertRaises(ValueError):
+            result = self.matcher.sub(sub, line)
 
 #
 # Test replacements with slashrefs
@@ -539,7 +552,7 @@ class TestSubWithLocalXforms(TestCaseDiff):
         self.assertLogicallyEqual(result, expected)
 
     def test_raw_struct_group_tagged(self):
-        """
+        r"""
         Test cxl_regs with struct_group_tagged patterns from drivers/cxl/cxl.h.
 
         NOTE:
diff --git a/tools/unittests/test_tokenizer.py b/tools/unittests/test_tokenizer.py
index 3081f27a7786..6a0bd49df72e 100755
--- a/tools/unittests/test_tokenizer.py
+++ b/tools/unittests/test_tokenizer.py
@@ -44,11 +44,12 @@ def make_tokenizer_test(name, data):
         """In-lined lambda-like function to run the test"""
 
         #
-        # Check if exceptions are properly handled
+        # Check if logger is working
         #
-        if "raises" in data:
-            with self.assertRaises(data["raises"]):
-                CTokenizer(data["source"])
+        if "log_level" in data:
+            with self.assertLogs('kdoc.c_lex', level='ERROR') as cm:
+                tokenizer = CTokenizer(data["source"])
+
             return
 
         #
@@ -123,7 +124,7 @@ TESTS_TOKENIZER = {
 
     "mismatch_error": {
         "source": "int a$ = 5;",          # $ is illegal
-        "raises": RuntimeError,
+        "log_level": "ERROR",
     },
 }
 
-- 
2.52.0


Reply via email to