This is an automated email from the ASF dual-hosted git repository.

xiaoxiang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/nuttx.git

commit b296b1debe5ab2b4b9f8c69a88c952d1fb15e139
Author: xuxingliang <[email protected]>
AuthorDate: Thu Oct 31 14:15:06 2024 +0800

    gdb/macro: cache macro info to json and load directly
    
    Use json module to save macro info to json file and load directly. It can save 2 seconds for x4b projects to load the plugin.
    
    Signed-off-by: xuxingliang <[email protected]>
---
 tools/gdb/nuttxgdb/macros.py | 44 ++++++++++++++++++++++++++------------------
 tools/gdb/nuttxgdb/utils.py  |  1 -
 2 files changed, 26 insertions(+), 19 deletions(-)

diff --git a/tools/gdb/nuttxgdb/macros.py b/tools/gdb/nuttxgdb/macros.py
index bf7b960eb3..aca22a47f3 100644
--- a/tools/gdb/nuttxgdb/macros.py
+++ b/tools/gdb/nuttxgdb/macros.py
@@ -38,6 +38,7 @@
 # Currently, we are using the second method.
 
 import hashlib
+import json
 import os
 import re
 import time
@@ -132,30 +133,37 @@ def fetch_macro_info(file):
     with open(file, "rb") as f:
         hash = hashlib.md5(f.read()).hexdigest()
 
-    cache = path.join(path.dirname(path.abspath(file)), f"{hash}.macro")
+    macros = {}
+    p = re.compile(".*macro[ ]*:[ ]*([\S]+\(.*?\)|[\w]+)[ ]*(.*)")
+    cache = path.join(path.dirname(path.abspath(file)), f"{hash}.json")
+    print(f"Load macro: {cache}")
     if not path.isfile(cache):
-        start = time.time()
+        t = time.time()
         os.system(f'readelf -wm "{file}" > "{cache}"')
-        print(f"readelf took {time.time() - start:.1f} seconds")
-        print(f"Cache macro info to {cache}")
-    else:
-        print(f"Load macro info from {cache}")
+        print(f"readelf took {time.time() - t:.1f} seconds")
 
-    p = re.compile(".*macro[ ]*:[ ]*([\S]+\(.*?\)|[\w]+)[ ]*(.*)")
-    macros = {}
+        t = time.time()
+        with open(cache, "r") as f2:
+            for line in f2.readlines():
+                if not line.startswith(" DW_MACRO_define") and not line.startswith(
+                    " DW_MACRO_undef"
+                ):
+                    continue
+
+                if not parse_macro(line, macros, p):
+                    print(f"Failed to parse {line}")
+
+        print(f"Parse macro took {time.time() - t:.1f} seconds")
 
-    start = time.time()
-    with open(cache, "r") as f2:
-        for line in f2.readlines():
-            if not line.startswith(" DW_MACRO_define") and not line.startswith(
-                " DW_MACRO_undef"
-            ):
-                continue
+        with open(cache, "w") as f2:
+            dump = json.dumps(macros, indent=4, sort_keys=True)
+            f2.write(dump)
 
-            if not parse_macro(line, macros, p):
-                print(f"Failed to parse {line}")
+        print(f"Cache macro info to {cache}")
+    else:
+        with open(cache, "r") as f2:
+            macros = json.load(f2)
 
-    print(f"Parse macro took {time.time() - start:.1f} seconds")
     return macros
 
 
diff --git a/tools/gdb/nuttxgdb/utils.py b/tools/gdb/nuttxgdb/utils.py
index 740012e535..81c44f6159 100644
--- a/tools/gdb/nuttxgdb/utils.py
+++ b/tools/gdb/nuttxgdb/utils.py
@@ -211,7 +211,6 @@ def get_symbol_value(name, locspec="nx_start", cacheable=True):
         # Try to expand macro by reading elf
         global g_macro_ctx
         if not g_macro_ctx:
-            gdb.write("No macro context found, trying to load from ELF\n")
             if len(gdb.objfiles()) > 0:
                 g_macro_ctx = MacroCtx(gdb.objfiles()[0].filename)
             else:

Reply via email to