kazimuth opened a new issue #7565:
URL: https://github.com/apache/tvm/issues/7565


   The bring-your-own-codegen C compiler demo (`ccompiler`) fails to handle the following Relay IR:
   ```
   def @main(%x: Tensor[(10, 10), float32], %y: Tensor[(10, 10), float32]) {
     %0 = (%x, %y);
     %5 = fn (%xy: (Tensor[(10, 10), float32], Tensor[(10, 10), float32]), Primitive=1, Compiler="ccompiler", global_symbol="tuple_thingamabob") {
       %1 = %xy.0;
       %2 = %xy.1;
       %3 = add(%1, %2);
       %4 = multiply(%1, %2);
       (%3, %4)
     };
     %6 = %5(%0);
     %6.0
   }
   ```
   Trying to compile this with the Relay VM throws an exception: `Check failed: res.size() > static_cast<size_t>(op->index) (1 vs. 1)`, raised [here](https://github.com/apache/tvm/blob/b52267e/src/relay/backend/contrib/codegen_c/codegen.cc#L72) in the C codegen. However, if the external codegen attributes (`Compiler` and `global_symbol`) are removed, the module compiles and runs fine.
   
   Repro Python script (based on `tests/python/relay/test_external_codegen.py`):
   
   ```python
   import os
   import sys
   import numpy as np
   
    import tvm
    import tvm.relay.testing
    import tvm.relay.transform
    import tvm.testing  # for tvm.testing.assert_allclose below
    from tvm import relay
    from tvm import runtime
    from tvm.contrib import graph_runtime, utils  # graph_runtime used by check_graph_runtime_result
   
   def set_external_func_attr(func, compiler, ext_symbol):
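        """Mark func for offload to an external codegen (Primitive/Compiler/global_symbol attrs)."""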
       func = func.with_attr("Primitive", tvm.tir.IntImm("int32", 1))
       func = func.with_attr("Compiler", compiler)
       func = func.with_attr("global_symbol", ext_symbol)
       return func
   
   
    def check_result(mod, map_inputs, out_shape, result, tol=1e-5, target="llvm", ctx=tvm.cpu()):
       if sys.platform == "win32":
           print("Skip test on Windows for now")
           return
   
       def update_lib(lib):
            test_dir = os.path.dirname(os.path.realpath(os.path.expanduser(__file__)))
           source_dir = os.path.join(test_dir, "..", "..", "..")
           contrib_path = os.path.join(source_dir, "src", "runtime", "contrib")
   
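            # dump the source produced by the external C codegen for inspection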
           for m in lib.imported_modules:
               if m.type_key == 'c':
                   print(m.get_source())
   
           kwargs = {}
           kwargs["options"] = ["-O2", "-std=c++14", "-I" + contrib_path]
           tmp_path = utils.tempdir()
           lib_name = "lib.so"
           lib_path = tmp_path.relpath(lib_name)
           lib.export_library(lib_path, fcompile=False, **kwargs)
           lib = tvm.runtime.load_module(lib_path)
   
           return lib
   
       def check_vm_result():
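            # compiling with the Relay VM is where the Check failure above is raised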
            with tvm.transform.PassContext(opt_level=3, disabled_pass=["AlterOpLayout"]):
               exe = relay.vm.compile(mod, target=target)
           code, lib = exe.save()
           lib = update_lib(lib)
           exe = runtime.vm.Executable.load_exec(code, lib)
           vm = runtime.vm.VirtualMachine(exe, ctx)
           out = vm.run(**map_inputs)
            tvm.testing.assert_allclose(out.asnumpy(), result, rtol=tol, atol=tol)
   
       def check_graph_runtime_result():
            with tvm.transform.PassContext(opt_level=3, disabled_pass=["AlterOpLayout"]):
               json, lib, _ = relay.build(mod, target=target)
           lib = update_lib(lib)
           rt_mod = tvm.contrib.graph_runtime.create(json, lib, ctx)
   
           for name, data in map_inputs.items():
               rt_mod.set_input(name, data)
           rt_mod.run()
           out = tvm.nd.empty(out_shape, ctx=ctx)
           out = rt_mod.get_output(0, out)
   
            tvm.testing.assert_allclose(out.asnumpy(), result, rtol=tol, atol=tol)
   
       check_vm_result()
        # check_graph_runtime_result()  # tuples don't work in the graph runtime anyway, AFAIK
   
   def test_tuple():
       x = relay.var("x", shape=(10, 10))
       y = relay.var("y", shape=(10, 10))
       tuple = relay.Tuple([x, y])
   
       xy = relay.var("xy", relay.TupleType([
           relay.TensorType((10, 10), dtype='float32'),
           relay.TensorType((10, 10), dtype='float32'),
       ]))
       x_ = relay.TupleGetItem(xy, 0)
       y_ = relay.TupleGetItem(xy, 1)
       sum = x_ + y_
       prod = x_ * y_
       result = relay.Tuple([sum, prod])
       subgraph = relay.Function([xy], result)
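        # route this subgraph to the "ccompiler" external codegen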
        subgraph = set_external_func_attr(subgraph, "ccompiler", "tuple_thingamabob")
   
       call = relay.Call(subgraph, [tuple])
       result = relay.TupleGetItem(call, 0)
       f = relay.Function([x, y], result)
       mod = tvm.IRModule()
       mod["main"] = f
   
       print(mod)
   
       x_data = np.random.rand(10, 10).astype("float32")
       y_data = np.random.rand(10, 10).astype("float32")
   
       check_result(
           mod,
           {
               'x': x_data,
               'y': y_data
           },
           (10, 10),
           (x_data + y_data)
       )
   
   if __name__ == '__main__':
       test_tuple()
   ```
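
   For comparison, here is a minimal, self-contained sketch of the working variant mentioned above: the same graph with `Primitive` kept but the `Compiler`/`global_symbol` attributes left off. This only mirrors the behaviour reported above (it is a sketch, not taken from the test suite; the function name `test_tuple_no_external` is made up):

    ```python
    import numpy as np

    import tvm
    import tvm.testing
    from tvm import relay, runtime


    def test_tuple_no_external():
        x = relay.var("x", shape=(10, 10))
        y = relay.var("y", shape=(10, 10))

        xy = relay.var("xy", relay.TupleType([
            relay.TensorType((10, 10), dtype="float32"),
            relay.TensorType((10, 10), dtype="float32"),
        ]))
        body = relay.Tuple([
            relay.TupleGetItem(xy, 0) + relay.TupleGetItem(xy, 1),
            relay.TupleGetItem(xy, 0) * relay.TupleGetItem(xy, 1),
        ])
        subgraph = relay.Function([xy], body)
        # keep Primitive=1 but leave Compiler/global_symbol unset
        subgraph = subgraph.with_attr("Primitive", tvm.tir.IntImm("int32", 1))

        call = relay.Call(subgraph, [relay.Tuple([x, y])])
        mod = tvm.IRModule.from_expr(relay.Function([x, y], relay.TupleGetItem(call, 0)))

        x_data = np.random.rand(10, 10).astype("float32")
        y_data = np.random.rand(10, 10).astype("float32")

        with tvm.transform.PassContext(opt_level=3, disabled_pass=["AlterOpLayout"]):
            exe = relay.vm.compile(mod, target="llvm")
        vm = runtime.vm.VirtualMachine(exe, tvm.cpu())
        out = vm.run(x=x_data, y=y_data)
        tvm.testing.assert_allclose(out.asnumpy(), x_data + y_data, rtol=1e-5, atol=1e-5)


    if __name__ == "__main__":
        test_tuple_no_external()
    ```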

