tlopex commented on code in PR #18544:
URL: https://github.com/apache/tvm/pull/18544#discussion_r2591469925
##########
tests/python/relax/test_frontend_from_exported_program.py:
##########
@@ -6525,6 +6527,40 @@ def forward(self, x):
from_exported_program(ep)
+def test_custom_op():
+    class AddOp(Module):
+        def forward(self, x, y):
+            return torch.ops.aten.add.Tensor(x, y)
+
+    @tvm.script.ir_module
+    class Expected:
+        @R.function
+        def main(
+            x: R.Tensor((5,), dtype="float32"),
+            y: R.Tensor((5,), dtype="float32"),
+        ) -> R.Tuple(R.Tensor((5,), dtype="float32")):
+            with R.dataflow():
+                lv: R.Tensor((5,), dtype="float32") = R.subtract(x, y)
+                gv: R.Tuple(R.Tensor((5,), dtype="float32")) = (lv,)
+                R.output(gv)
+            return gv
+
+    from tvm.relax.frontend.torch.exported_program_translator import (
+        ExportedProgramImporter,
+    )
+
+    def custom_add_converter(node: torch.fx.Node, self: ExportedProgramImporter) -> relax.Var:
+        x = self.env[node.args[0]]
+        y = self.env[node.args[1]]
+
+        return self.block_builder.emit(R.subtract(x, y))
Review Comment:
I didn't quite follow this part. Is the custom converter intended to perform an addition or a subtraction? cc @mshr-h
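
For reference, if addition is the intent, I'd expect the converter (and the `Expected` module) to emit `R.add` rather than `R.subtract`. A minimal sketch, assuming the rest of the test stays as written in this PR:

```python
# Sketch (assumption): same converter as in the PR, but emitting Relax add
# so the converted graph mirrors torch.ops.aten.add.Tensor.
def custom_add_converter(node: torch.fx.Node, self: ExportedProgramImporter) -> relax.Var:
    x = self.env[node.args[0]]
    y = self.env[node.args[1]]
    return self.block_builder.emit(R.add(x, y))
```

If the subtraction is deliberate (e.g. to make it observable that the custom converter, not the built-in `aten.add` handling, produced the result), a short comment in the test stating that would make the intent explicit.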