This is an automated email from the ASF dual-hosted git repository.

tqchen pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tvm-ffi.git


The following commit(s) were added to refs/heads/main by this push:
     new 7057705  [Lint] Lint the build torch c dlpack utility (#203)
7057705 is described below

commit 70577053dcbd3c88e8352e137232d5c085997fb3
Author: Yaoyao Ding <[email protected]>
AuthorDate: Wed Oct 29 19:09:00 2025 -0400

    [Lint] Lint the build torch c dlpack utility (#203)
    
    This PR lints the `_optional_build_torch_c_dlpack.py` utility by moving
    some definitions inside the main function.
---
 python/tvm_ffi/_optional_torch_c_dlpack.py         | 25 ++++--
 ...dlpack.py => _build_optional_torch_c_dlpack.py} | 89 +++++++++++++++-------
 tests/python/test_optional_torch_c_dlpack.py       | 41 +++++-----
 3 files changed, 100 insertions(+), 55 deletions(-)

diff --git a/python/tvm_ffi/_optional_torch_c_dlpack.py 
b/python/tvm_ffi/_optional_torch_c_dlpack.py
index baf100f..2be2301 100644
--- a/python/tvm_ffi/_optional_torch_c_dlpack.py
+++ b/python/tvm_ffi/_optional_torch_c_dlpack.py
@@ -60,15 +60,26 @@ def load_torch_c_dlpack_extension() -> Any:
 
         # check whether a JIT shared library is built in cache
         cache_dir = Path(os.environ.get("TVM_FFI_CACHE_DIR", 
"~/.cache/tvm-ffi")).expanduser()
-        addon_build_dir = cache_dir / "torch_c_dlpack_addon"
-        lib_path = addon_build_dir / (
-            "libtorch_c_dlpack_addon" + (".dll" if sys.platform == "win32" 
else ".so")
-        )
+        addon_output_dir = cache_dir
+        major, minor = torch.__version__.split(".")[:2]
+        device = "cpu" if not torch.cuda.is_available() else "cuda"
+        suffix = ".dll" if sys.platform.startswith("win") else ".so"
+        libname = 
f"libtorch_c_dlpack_addon_torch{major}{minor}-{device}{suffix}"
+        lib_path = addon_output_dir / libname
         if not lib_path.exists():
-            build_script_path = Path(__file__).parent / "utils" / 
"_build_optional_c_dlpack.py"
-            args = [sys.executable, str(build_script_path), "--build_dir", 
str(addon_build_dir)]
+            build_script_path = (
+                Path(__file__).parent / "utils" / 
"_build_optional_torch_c_dlpack.py"
+            )
+            args = [
+                sys.executable,
+                str(build_script_path),
+                "--output-dir",
+                str(cache_dir),
+                "--libname",
+                libname,
+            ]
             if torch.cuda.is_available():
-                args.append("--build_with_cuda")
+                args.append("--build-with-cuda")
             subprocess.run(
                 args,
                 check=True,
diff --git a/python/tvm_ffi/utils/_build_optional_c_dlpack.py 
b/python/tvm_ffi/utils/_build_optional_torch_c_dlpack.py
similarity index 90%
rename from python/tvm_ffi/utils/_build_optional_c_dlpack.py
rename to python/tvm_ffi/utils/_build_optional_torch_c_dlpack.py
index 166a69e..0c0724c 100644
--- a/python/tvm_ffi/utils/_build_optional_c_dlpack.py
+++ b/python/tvm_ffi/utils/_build_optional_torch_c_dlpack.py
@@ -23,6 +23,7 @@ import os
 import shutil
 import sys
 import sysconfig
+import tempfile
 from collections.abc import Sequence
 from pathlib import Path
 
@@ -30,12 +31,7 @@ import torch
 import torch.torch_version
 import torch.utils.cpp_extension
 
-# we need to set the following env to avoid tvm_ffi to build the torch 
c-dlpack addon during importing
-os.environ["TVM_FFI_DISABLE_TORCH_C_DLPACK"] = "1"
-
-from tvm_ffi.cpp.load_inline import build_ninja
-from tvm_ffi.libinfo import find_dlpack_include_path
-from tvm_ffi.utils.lockfile import FileLock
+# Important: to avoid cyclic dependency, we avoid import tvm_ffi names at top 
level here.
 
 IS_WINDOWS = sys.platform == "win32"
 
@@ -580,6 +576,8 @@ def _generate_ninja_build(
     extra_include_paths: Sequence[str],
 ) -> None:
     """Generate the content of build.ninja for building the module."""
+    from tvm_ffi.libinfo import find_dlpack_include_path  # noqa: PLC0415
+
     if IS_WINDOWS:
         default_cflags = [
             "/std:c++17",
@@ -665,36 +663,69 @@ def get_torch_include_paths(build_with_cuda: bool) -> 
Sequence[str]:
         return torch.utils.cpp_extension.include_paths(cuda=build_with_cuda)
 
 
-parser = argparse.ArgumentParser()
-parser.add_argument(
-    "--build_dir",
-    type=str,
-    default=str(Path("~/.cache/tvm-ffi/torch_c_dlpack_addon").expanduser()),
-    help="Directory to store the built extension library.",
-)
-parser.add_argument(
-    "--build_with_cuda",
-    action="store_true",
-    default=torch.cuda.is_available(),
-    help="Build with CUDA support.",
-)
-
-
 def main() -> None:  # noqa: PLR0912, PLR0915
     """Build the torch c dlpack extension."""
+    # we need to set the following env to avoid tvm_ffi to build the torch 
c-dlpack addon during importing
+    os.environ["TVM_FFI_DISABLE_TORCH_C_DLPACK"] = "1"
+    from tvm_ffi.cpp.load_inline import build_ninja  # noqa: PLC0415
+    from tvm_ffi.utils.lockfile import FileLock  # noqa: PLC0415
+
+    parser = argparse.ArgumentParser(
+        description="Build the torch c dlpack extension. After building, a 
shared library will be placed in the output directory.",
+    )
+    parser.add_argument(
+        "--build-dir",
+        type=str,
+        required=False,
+        help="Directory to store the built extension library. If not provided, 
a temporary directory will be used.",
+    )
+    parser.add_argument(
+        "--output-dir",
+        type=str,
+        required=False,
+        default=str(Path(os.environ.get("TVM_FFI_CACHE_DIR", 
"~/.cache/tvm-ffi")).expanduser()),
+        help="Directory to store the built extension library. If not 
specified, the default cache directory of tvm-ffi will be used.",
+    )
+    parser.add_argument(
+        "--build-with-cuda",
+        action="store_true",
+        help="Build with CUDA support.",
+    )
+    parser.add_argument(
+        "--libname",
+        type=str,
+        default="auto",
+        help="The name of the generated library. It can be a name 'auto' to 
auto-generate a name following 
'libtorch_c_dlpack_addon_torch{version.major}{version.minor}-cpu/cuda.{extension}'.",
+    )
+
     args = parser.parse_args()
-    build_dir = Path(args.build_dir)
 
+    # resolve build directory
+    if args.build_dir is None:
+        build_dir = Path(tempfile.mkdtemp(prefix="tvm-ffi-torch-c-dlpack-"))
+    else:
+        build_dir = Path(args.build_dir)
+    build_dir = build_dir.resolve()
     if not build_dir.exists():
         build_dir.mkdir(parents=True, exist_ok=True)
 
-    name = "libtorch_c_dlpack_addon"
-    suffix = ".dll" if IS_WINDOWS else ".so"
-    libname = name + suffix
-    tmp_libname = name + ".tmp" + suffix
+    # resolve library name
+    if args.libname == "auto":
+        major, minor = torch.__version__.split(".")[:2]
+        device = "cpu" if not args.build_with_cuda else "cuda"
+        suffix = ".dll" if IS_WINDOWS else ".so"
+        libname = 
f"libtorch_c_dlpack_addon_torch{major}{minor}-{device}{suffix}"
+    else:
+        libname = args.libname
+    tmp_libname = libname + ".tmp"
+
+    # create output directory is not exists
+    output_dir = Path(args.output_dir).expanduser()
+    if not output_dir.exists():
+        output_dir.mkdir(parents=True, exist_ok=True)
 
-    with FileLock(str(build_dir / "build.lock")):
-        if (build_dir / libname).exists():
+    with FileLock(str(output_dir / (libname + ".lock"))):
+        if (output_dir / libname).exists():
             # already built
             return
 
@@ -773,7 +804,7 @@ def main() -> None:  # noqa: PLR0912, PLR0915
         build_ninja(build_dir=str(build_dir))
 
         # rename the tmp file to final libname
-        shutil.move(str(build_dir / tmp_libname), str(build_dir / libname))
+        shutil.move(str(build_dir / tmp_libname), str(output_dir / libname))
 
 
 if __name__ == "__main__":
diff --git a/tests/python/test_optional_torch_c_dlpack.py 
b/tests/python/test_optional_torch_c_dlpack.py
index 33254e3..fe0bac5 100644
--- a/tests/python/test_optional_torch_c_dlpack.py
+++ b/tests/python/test_optional_torch_c_dlpack.py
@@ -35,16 +35,20 @@ IS_WINDOWS = sys.platform.startswith("win")
 
 @pytest.mark.skipif(torch is None, reason="torch is not installed")
 def test_build_torch_c_dlpack_extension() -> None:
-    build_script = Path(tvm_ffi.__file__).parent / "utils" / 
"_build_optional_c_dlpack.py"
-    subprocess.run(
-        [sys.executable, str(build_script), "--build_dir", 
"./build_test_dir"], check=True
-    )
+    build_script = Path(tvm_ffi.__file__).parent / "utils" / 
"_build_optional_torch_c_dlpack.py"
+    args = [
+        sys.executable,
+        str(build_script),
+        "--output-dir",
+        "./output-dir",
+        "--libname",
+        "libtorch_c_dlpack_addon_test.so",
+    ]
+    if torch.cuda.is_available():
+        args.append("--build-with-cuda")
+    subprocess.run(args, check=True)
 
-    lib_path = str(
-        Path(
-            "./build_test_dir/libtorch_c_dlpack_addon.{}".format("dll" if 
IS_WINDOWS else "so")
-        ).resolve()
-    )
+    lib_path = 
str(Path("./output-dir/libtorch_c_dlpack_addon_test.so").resolve())
     assert Path(lib_path).exists()
 
     lib = ctypes.CDLL(lib_path)
@@ -56,22 +60,21 @@ def test_build_torch_c_dlpack_extension() -> None:
 
 @pytest.mark.skipif(torch is None, reason="torch is not installed")
 def test_parallel_build() -> None:
-    build_script = Path(tvm_ffi.__file__).parent / "utils" / 
"_build_optional_c_dlpack.py"
+    build_script = Path(tvm_ffi.__file__).parent / "utils" / 
"_build_optional_torch_c_dlpack.py"
     num_processes = 4
-    build_dir = "./build_test_dir_parallel"
+    output_dir = "./output-dir-parallel"
+    libname = "libtorch_c_dlpack_addon_test.so"
     processes = []
     for i in range(num_processes):
-        p = subprocess.Popen([sys.executable, str(build_script), 
"--build_dir", build_dir])
-        processes.append((p, build_dir))
+        p = subprocess.Popen(
+            [sys.executable, str(build_script), "--output-dir", output_dir, 
"--libname", libname]
+        )
+        processes.append((p, output_dir))
 
-    for p, build_dir in processes:
+    for p, output_dir in processes:
         p.wait()
         assert p.returncode == 0
-    lib_path = str(
-        Path(
-            "{}/libtorch_c_dlpack_addon.{}".format(build_dir, "dll" if 
IS_WINDOWS else "so")
-        ).resolve()
-    )
+    lib_path = str(Path(f"{output_dir}/{libname}").resolve())
     assert Path(lib_path).exists()
 
 

Reply via email to