Re: [I] [Bug] a typo mistake in pytorch frontend nonzero_numpy [tvm]

2024-01-12 Thread via GitHub


masahi closed issue #16389: [Bug] a typo mistake in pytorch frontend 
nonzero_numpy
URL: https://github.com/apache/tvm/issues/16389


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



(tvm) branch nightly updated (ae8d398b88 -> 196b413813)

2024-01-12 Thread github-bot
This is an automated email from the ASF dual-hosted git repository.

github-bot pushed a change to branch nightly
in repository https://gitbox.apache.org/repos/asf/tvm.git


from ae8d398b88 [CI] In jenkins.cmd_utils.Sh.tee, check for failing 
subprocess (#16382)
 add f1bf20a950 [RPC] Fix tuning on macOS and Windows (#15771) (#16357)
 add 4258c864b9 [RUNTIME][RPC] Enable RPCObjectRef return in RPC (#16387)
 add 196b413813 [Relay][Frontend][Torch] fix a typo mistake in 
nonzero_numpy (#16390)

No new revisions were added by this update.

Summary of changes:
 include/tvm/runtime/object.h  |  4 ++-
 python/tvm/relay/frontend/pytorch.py  |  2 +-
 python/tvm/rpc/server.py  | 11 +++---
 src/runtime/minrpc/minrpc_server.h| 15 ++--
 src/runtime/minrpc/rpc_reference.h|  8 +
 src/runtime/rpc/rpc_endpoint.cc   | 51 +++
 src/runtime/rpc/rpc_local_session.cc  | 20 +--
 src/runtime/rpc/rpc_module.cc |  7 
 src/runtime/rpc/rpc_session.h | 51 ++-
 tests/python/frontend/pytorch/test_forward.py |  1 +
 tests/python/runtime/test_runtime_rpc.py  | 31 
 11 files changed, 182 insertions(+), 19 deletions(-)



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


Lunderberg commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450895642


##
tests/python/relax/test_dataflow_inplace.py:
##
@@ -0,0 +1,464 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import List, Set, Tuple
+import tvm
+from tvm import relax, testing
+from tvm.relax.transform import DataflowUseInplaceCalls
+from tvm.relax.testing.transform import (
+    dataflow_liveness_analysis,
+    dataflow_alias_analysis,
+    dataflow_inplace_analysis,
+    dataflow_single_inplace_call,
+)
+from tvm.script.parser import ir as I, relax as R, tir as T
+
+import numpy as np
+
+
+def test_liveness_analysis():
+    @I.ir_module
+    class BasicLiveness:
+        @R.function
+        def main(x: R.Tensor((), "int32")) -> R.Tensor((), "int32"):
+            with R.dataflow():
+                y = R.const(1, dtype="int32")
+                z = R.add(x, y)
+                q = R.multiply(z, y)
+                p = R.add(z, q)
+                n = R.multiply(p, p)
+                R.output(n)
+            return n
+
+    block = BasicLiveness["main"].body.blocks[0]
+    live_ranges = dataflow_liveness_analysis(block)
+    expected_ranges = {
+        "x": (-1, 1),
+        "y": (0, 2),
+        "z": (1, 3),
+        "q": (2, 3),
+        "p": (3, 4),
+        "n": (4, 5),
+    }
+    for var, live_range in live_ranges.items():
+        assert live_range == expected_ranges[var.name_hint]
+
+
+def test_alias_analysis_basic():
+    @I.ir_module
+    class BasicAliasAnalysis:
+        @R.function
+        def main(x: R.Tensor((), "int32")) -> R.Tensor((), "int32"):
+            with R.dataflow():
+                y = x  # y is an alias of x
+                z = R.add(y, y)  # fresh value
+                n = z  # alias of z
+                R.output(n)
+            return n
+
+    block = BasicAliasAnalysis["main"].body.blocks[0]
+    alias_sets, tuple_map = dataflow_alias_analysis(block, BasicAliasAnalysis["main"].params)
+    expected = {
+        "x": {0},
+        "y": {0},
+        "z": {1},
+        "n": {1},
+    }
+
+    for var, alias_set in alias_sets.items():
+        assert alias_set == expected[var.name_hint]
+    assert tuple_map == {}
+
+
+def test_alias_analysis_tuple():
+    @I.ir_module
+    class AliasesWithTuples:
+        @R.function
+        def main(x: R.Tensor((), "int32")) -> R.Tensor((), "int32"):
+            with R.dataflow():
+                y = R.const(1, dtype="int32")
+                t = (x, y)
+                a = t[0]
+                b = t[1]
+                c = t[0]
+                d = t[1]
+                u = t
+                e = t[0]
+                f = t[1]
+                z = R.add(c, d)
+                n = z
+                R.output(n)
+            return n
+
+    block = AliasesWithTuples["main"].body.blocks[0]
+    alias_sets, tuple_map = dataflow_alias_analysis(block, AliasesWithTuples["main"].params)
+    expected = {
+        "x": {0},
+        "y": {1},
+        "t": {2},
+        "a": {0},
+        "b": {1},
+        "c": {0},
+        "d": {1},
+        "u": {2},
+        "e": {0},
+        "f": {1},
+        "z": {3},
+        "n": {3},
+    }
+
+    for var, alias_set in alias_sets.items():
+        assert alias_set == expected[var.name_hint]
+    assert 2 in tuple_map
+    assert tuple_map[2] == [{0}, {1}]
+
+
+def test_alias_split():
+    @I.ir_module
+    class AliasSplit:
+        @R.function
+        def main(x: R.Tensor((60,), "int32")) -> R.Tensor((15,), "int32"):
+            with R.dataflow():
+                t = R.split(x, 4)
+                y = t[0]
+                z = t[1]
+                q = t[2]
+                p = t[3]
+                n = z
+                R.output(n)
+            return n
+
+    block = AliasSplit["main"].body.blocks[0]
+    alias_sets, tuple_map = dataflow_alias_analysis(block, AliasSplit["main"].params)
+    expected = {
+        "x": {0},
+        "t": {1},
+        "y": {2},
+        "z": {3},
+        "q": {4},
+        "p": {5},
+        "n": {3},
+    }
+
+    for var, alias_set in alias_sets.items():
+        assert alias_set == expected[var.name_hint]
+    assert len(tuple_map) == 1
+    assert 1 in tuple_map
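
The expected_ranges in test_liveness_analysis can be reproduced by hand. Below is a standalone sketch of that bookkeeping in plain Python, separate from the TVM pass itself; the (defined, used) encoding of the bindings is an assumption made only for illustration:

    # Bindings of BasicLiveness.main in order: (defined var, vars read by its value).
    bindings = [
        ("y", []),          # 0: y = R.const(1)
        ("z", ["x", "y"]),  # 1: z = R.add(x, y)
        ("q", ["z", "y"]),  # 2: q = R.multiply(z, y)
        ("p", ["z", "q"]),  # 3: p = R.add(z, q)
        ("n", ["p", "p"]),  # 4: n = R.multiply(p, p)
    ]

    def liveness(bindings, outputs):
        """Map each var to (start, end). start is the index of its definition, or -1 if the
        var comes from outside the block; end is the index of its last use, or len(bindings)
        if the var is an output of the block."""
        ranges = {}
        for i, (defined, used) in enumerate(bindings):
            for var in used:
                start = ranges[var][0] if var in ranges else -1
                ranges[var] = (start, i)
            ranges[defined] = (i, i)
        for out in outputs:  # outputs stay live past the end of the block
            ranges[out] = (ranges[out][0], len(bindings))
        return ranges

    assert liveness(bindings, outputs={"n"}) == {
        "x": (-1, 1), "y": (0, 2), "z": (1, 3), "q": (2, 3), "p": (3, 4), "n": (4, 5),
    }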

Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


Lunderberg commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450894366


##
src/relax/transform/dataflow_inplace.cc:
##
@@ -0,0 +1,1017 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*!
+ * \file src/relax/transform/dataflow_inplace.cc
+ * \brief Pass that converts eligible operator calls in dataflow blocks
+ *   into in-place versions.
+ */
+
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+
+#include "utils.h"
+
+namespace tvm {
+namespace relax {
+
+// Perform liveness analysis on a dataflow block, returning a map of vars to
+// pairs of indices (the liveness interval, from the starting index to the end index).
+// A starting index of -1 means the var is defined before the block starts and an end index
+// of block->bindings.size() (one past the last index) means it is live after the block ends.
+std::unordered_map<Var, std::pair<int, int>, ObjectPtrHash, ObjectPtrEqual> AnalyzeLiveness(
+    const DataflowBlock& block) {
+  std::unordered_map<Var, std::pair<int, int>, ObjectPtrHash, ObjectPtrEqual> ret;
+  for (int i = block->bindings.size() - 1; i >= 0; i--) {
+    Binding b = block->bindings[i];
+    Var defined_var = b->var;
+    Expr value = GetBoundValue(b);
+    Array<Var> used_vars;
+    // for a function literal, we consider only the free vars
+    // (those captured from the outer scope)
+    if (value.as<FunctionNode>()) {
+      used_vars = FreeVars(value);
+    } else if (value.as<TupleGetItemNode>()) {
+      // Special case: we do not consider a tuple index to be a "use."
+      // This is a bit of a hack but allows us to do operations that
+      // create tuples to be done in-place (otherwise, any index of the tuple
+      // would be considered a use and so the tuple would be live later).
+      // Hence we keep the array empty.
+    } else {
+      used_vars = AllVars(value);
+    }
+
+    for (auto var : used_vars) {
+      if (!ret.count(var)) {
+        ret[var] = {-1, i};
+      }
+    }
+
+    if (!ret.count(defined_var)) {
+      // if it's an output, then it lives past the end of the block
+      if (!defined_var.as<DataflowVarNode>()) {
+        ret[defined_var] = {i, block->bindings.size()};
+      } else {
+        // otherwise, it's live only here
+        ret[defined_var] = {i, i};
+      }
+    } else {
+      // this means the var is used later but we encountered its definition now
+      auto last_range = ret[defined_var];
+      CHECK_EQ(last_range.first, -1);
+      std::pair<int, int> new_range = {i, last_range.second};
+      ret[defined_var] = new_range;
+    }
+  }
+  return ret;
+}
+
+class AliasAnalyzer {
+ public:
+  AliasAnalyzer() : alias_map_(), tuple_map_(), mem_idx_(0) {}
+
+  // The analysis returns a map of vars to memory locations that it *could* map to
+  // (any unique allocation = one memory location), plus a map of memory locations
+  // that correspond to tuples (this maps to sets of memory locations for each tuple element).
+  // Note: inputs are values that should be assumed not to be aliased and are therefore
+  // (in the case of in-place ops) safe to overwrite. This may not be true of function args.
+  std::pair<std::unordered_map<Var, std::unordered_set<int>, ObjectPtrHash, ObjectPtrEqual>,
+            std::unordered_map<int, std::vector<std::unordered_set<int>>>>
+  Analyze(const DataflowBlock& block, const Array<Var>& inputs) {
+    for (auto input : inputs) {
+      int curr_idx = get_fresh_idx();
+      alias_map_[input] = {curr_idx};
+      if (auto* tup_info = GetStructInfoAs<TupleStructInfoNode>(input)) {
+        InsertFreshTuple(curr_idx, tup_info);
+      }
+    }
+
+    for (const Binding& binding : block->bindings) {
+      Var current_var = binding->var;
+      Expr value = GetBoundValue(binding);
+      alias_map_[current_var] = GetAliasSet(value, current_var);
+    }
+
+    return {alias_map_, tuple_map_};
+  }
+
+ private:
+  int get_fresh_idx() {
+    int ret = mem_idx_;
+    mem_idx_++;
+    return ret;
+  }
+
+  // Fresh tuple = each element is assumed to be a unique allocation
+  void InsertFreshTuple(int tup_idx, const TupleStructInfoNode* tup_info) {
+    std::vector<std::unordered_set<int>> tuple_set;
+    for (int i = 0; i < static_cast<int>(tup_info->fields.size()); i++) {
+      int curr_field = get_fresh_idx();
+      tuple_set.push_back({curr_field});
+      if (auto* nested_tup_info =
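
The alias bookkeeping described in the AliasAnalyzer comment above (each allocation gets a fresh memory location, trivial bindings copy the location set, and tuples get a per-element table) can be replayed by hand on the AliasesWithTuples example from the test file quoted earlier. A plain-Python sketch of that replay; the dict/list encoding and helper names are illustrative assumptions, not the pass's interface:

    fresh = iter(range(100))                # fresh memory-location ids
    alias_map = {"x": {next(fresh)}}        # the input x gets an unaliased location {0}
    tuple_map = {}

    alias_map["y"] = {next(fresh)}          # y = R.const(1): new allocation -> {1}
    t_loc = next(fresh)                     # t = (x, y): the tuple itself is location 2
    alias_map["t"] = {t_loc}
    tuple_map[t_loc] = [alias_map["x"], alias_map["y"]]  # per-element location sets
    for var, idx in [("a", 0), ("b", 1), ("c", 0), ("d", 1), ("e", 0), ("f", 1)]:
        alias_map[var] = tuple_map[t_loc][idx]           # t[i] aliases element i of t
    alias_map["u"] = alias_map["t"]                      # u = t aliases the tuple itself
    alias_map["z"] = {next(fresh)}          # z = R.add(c, d): new allocation -> {3}
    alias_map["n"] = alias_map["z"]         # n = z

    assert alias_map == {"x": {0}, "y": {1}, "t": {2}, "a": {0}, "b": {1}, "c": {0},
                         "d": {1}, "u": {2}, "e": {0}, "f": {1}, "z": {3}, "n": {3}}
    assert tuple_map == {2: [{0}, {1}]}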

Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


Lunderberg commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450891802



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


Lunderberg commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450891113



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


Lunderberg commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450883667



(tvm) branch unity updated: [Unity][nnModule] Dynamic shape support in nn Module (#16284)

2024-01-12 Thread tqchen
This is an automated email from the ASF dual-hosted git repository.

tqchen pushed a commit to branch unity
in repository https://gitbox.apache.org/repos/asf/tvm.git


The following commit(s) were added to refs/heads/unity by this push:
 new 07d8e02367 [Unity][nnModule] Dynamic shape support in nn Module 
(#16284)
07d8e02367 is described below

commit 07d8e0236791e5d9069f2b4f9227bc33f77b328b
Author: Charlie Ruan <53290280+charliefr...@users.noreply.github.com>
AuthorDate: Sat Jan 13 03:44:01 2024 +0800

[Unity][nnModule] Dynamic shape support in nn Module (#16284)

* [Unity][nnModule] Dynamic shape support in nn Module
---
 python/tvm/relax/frontend/nn/core.py | 15 +++
 python/tvm/relax/frontend/nn/exporter.py | 21 ++---
 python/tvm/relax/frontend/nn/modules.py  | 11 ---
 3 files changed, 37 insertions(+), 10 deletions(-)

diff --git a/python/tvm/relax/frontend/nn/core.py b/python/tvm/relax/frontend/nn/core.py
index 9c99ba6177..8eeffd8758 100644
--- a/python/tvm/relax/frontend/nn/core.py
+++ b/python/tvm/relax/frontend/nn/core.py
@@ -128,13 +128,15 @@ class Tensor(_TensorOp):
 
     @staticmethod
     def placeholder(
-        shape: Sequence[Union[int, tir.PrimExpr]],
+        shape: Sequence[Union[int, str, tir.PrimExpr]],
         dtype: str,
         name: str = "tensor",
     ) -> "Tensor":
         """Create a placeholder tensor with given shape and dtype. A placeholder tensor should
         never be created directly by users in usual cases, and the only exception is to indicate
         the shape/dtype of return values of an external function.
+
+        If shape is a string `name`, we create a symbolic shape `tvm.tir.Var(name, "int64")`.
         """
         new_shape = []
         for expr in shape:
@@ -143,6 +145,10 @@ class Tensor(_TensorOp):
                 assert expr >= 0
                 new_shape.append(expr)
                 continue
+            if isinstance(expr, str):
+                expr = tir.Var(expr, "int64")
+                new_shape.append(expr)
+                continue
             if not isinstance(expr, tir.PrimExpr):
                 raise TypeError(f"Invalid shape: {shape}")
             assert expr.dtype == "int64"
@@ -214,7 +220,7 @@ class Parameter(Tensor):
 
     def __init__(
         self,
-        shape: Sequence[Union[int, tir.PrimExpr]],
+        shape: Sequence[Union[int, str, tir.PrimExpr]],
         dtype: Optional[str] = None,
     ) -> None:
         """Create a parameter with given shape and dtype. The parameter is not bound to any
@@ -222,8 +228,9 @@ class Parameter(Tensor):
 
         Parameters
         ----------
-        shape : Sequence[Union[int, tir.PrimExpr]]
-            The shape of the parameter
+        shape : Sequence[Union[int, str, tir.PrimExpr]]
+            The shape of the parameter. If it is a string `name`, we create a symbolic shape
+            `tvm.tir.Var(name, "int64")`.
         dtype : Optional[str]
             The data type of the parameter. If not specified, the default dtype will be used.
         """
diff --git a/python/tvm/relax/frontend/nn/exporter.py b/python/tvm/relax/frontend/nn/exporter.py
index 99591c8a3e..d452af69d3 100644
--- a/python/tvm/relax/frontend/nn/exporter.py
+++ b/python/tvm/relax/frontend/nn/exporter.py
@@ -111,8 +111,7 @@ class Exporter:
             return result
 
         # pylint: enable=protected-access
-
-        params = _params()
+        params = None
         effects = _effects()
         ext_mods = self.extern_mods
         with self:
@@ -122,6 +121,7 @@ class Exporter:
                     outputs = _emit_effect_init(self.builder, effects)
                 self.builder.emit_func_output(outputs, params=[])
             for method_name, method_spec in zip(spec.method_names, spec.method_specs):
+                params = _params()  # Re-initialize so symbolic shapes not shared across methods
                 len_args = len(method_spec.arg_specs)
                 len_effects = {
                     "packed": 1,
@@ -159,6 +159,9 @@ def _emit_method(  # pylint: disable=too-many-locals,too-many-branches,too-many-
     effects: typing.Optional[typing.List[typing.Tuple[str, core.Effect]]],
 ):
     # pylint: disable=protected-access
+    # symbolic shape's name mapping to its tir.Var for reuse
+    str2var_params: typing.Dict[str, tir.Var] = {}
+
     def _unwrap_ret(expr: typing.Any) -> typing.Any:
         if isinstance(expr, (core.Tensor, core.Object)):
             return expr._expr
@@ -184,8 +187,20 @@ def _emit_method(  # pylint: disable=too-many-locals,too-many-branches,too-many-
 
     def _params(mode: str) -> typing.List[rx.Var]:
         inputs: typing.List[rx.Var] = []
+
+        def _get_var(shape_var: tir.Var) -> tir.Var:
+            name = shape_var.name
+            if name in str2var_params:
+                return str2var_params[name]
+            var = tir.Var(name, "int64")
+            str2var_params[
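
A minimal usage sketch of the string-shape support added by this commit, relying only on the Tensor.placeholder and Parameter signatures shown in the core.py hunks above; the dimension names and the check on the .shape accessor are illustrative assumptions, not part of the commit:

    from tvm import tir
    from tvm.relax.frontend import nn

    # A string dimension becomes a symbolic tir.Var(name, "int64") (core.py hunk);
    # at export time the exporter reuses one tir.Var per distinct name within a
    # method, but re-creates them per method (exporter.py hunk).
    x = nn.Tensor.placeholder(["batch", 32], "float32", name="x")
    w = nn.Parameter(["vocab", 32], "float32")

    assert isinstance(x.shape[0], tir.Var) and x.shape[0].dtype == "int64"
    assert isinstance(w.shape[0], tir.Var)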

Re: [PR] [Unity][nnModule] Dynamic shape support in nn Module [tvm]

2024-01-12 Thread via GitHub


tqchen merged PR #16284:
URL: https://github.com/apache/tvm/pull/16284


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450854500



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450853692



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450853110


##
tests/python/relax/test_dataflow_inplace.py:
##
Review Comment:
   That's a good point, I hadn't thought about it. Values that come from 
outside the DF block should be considered to be live for the whole thing anyway 
since we're not checking if they're used again.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450850437



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450847919



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450847241



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450846628


##
src/relax/transform/dataflow_inplace.cc:
##
@@ -0,0 +1,1017 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ *
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*!
+ * \file src/relax/transform/dataflow_inplace.cc
+ * \brief Pass that converts eligible operator calls in dataflow blocks
+ *   into in-place versions.
+ */
+
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+#include 
+
+#include "utils.h"
+
+namespace tvm {
+namespace relax {
+
+// Perform liveness analysis on a dataflow block, returning a map of vars to
+// pairs of indices (the liveness interval, from the starting index to the end 
index).
+// A starting index of -1 means the var is defined before the block starts and 
an end index
+// of block->bindings.size() (one past the last index) means it is live after 
the block ends.
+std::unordered_map<Var, std::pair<int, int>, ObjectPtrHash, ObjectPtrEqual> AnalyzeLiveness(
+const DataflowBlock& block) {
+  std::unordered_map<Var, std::pair<int, int>, ObjectPtrHash, ObjectPtrEqual> ret;
+  for (int i = block->bindings.size() - 1; i >= 0; i--) {
+Binding b = block->bindings[i];
+Var defined_var = b->var;
+Expr value = GetBoundValue(b);
+Array<Var> used_vars;
+// for a function literal, we consider only the free vars
+// (those captured from the outer scope)
+if (value.as<FunctionNode>()) {
+  used_vars = FreeVars(value);
+} else if (value.as<TupleGetItemNode>()) {
+  // Special case: we do not consider a tuple index to be a "use."
+  // This is a bit of a hack but allows us to do operations that
+  // create tuples to be done in-place (otherwise, any index of the tuple
+  // would be considered a use and so the tuple would be live later).
+  // Hence we keep the array empty.
+} else {
+  used_vars = AllVars(value);
+}
+
+for (auto var : used_vars) {
+  if (!ret.count(var)) {
+ret[var] = {-1, i};
+  }
+}
+
+if (!ret.count(defined_var)) {
+  // if it's an output, then it lives past the end of the block
+  if (!defined_var.as<DataflowVarNode>()) {
+ret[defined_var] = {i, block->bindings.size()};
+  } else {
+// otherwise, it's live only here
+ret[defined_var] = {i, i};
+  }
+} else {
+  // this means the var is used later but we encountered its definition now
+  auto last_range = ret[defined_var];
+  CHECK_EQ(last_range.first, -1);
+  std::pair<int, int> new_range = {i, last_range.second};
+  ret[defined_var] = new_range;
+}
+  }
+  return ret;
+}
+
+class AliasAnalyzer {
+ public:
+  AliasAnalyzer() : alias_map_(), tuple_map_(), mem_idx_(0) {}
+
+  // The analysis returns a map of vars to memory locations that it *could* 
map to
+  // (any unique allocation = one memory location), plus a map of memory 
locations
+  // that correspond to tuples (this maps to sets of memory locations for each 
tuple element).
+  // Note: inputs are values that should be assumed not to be aliased and are 
therefore
+  // (in the case of in-place ops) safe to overwrite. This may not be true of 
function args.
+  std::pair, ObjectPtrHash, 
ObjectPtrEqual>,
+std::unordered_map>>>
+  Analyze(const DataflowBlock& block, const Array<Var>& inputs) {
+for (auto input : inputs) {
+  int curr_idx = get_fresh_idx();
+  alias_map_[input] = {curr_idx};
+  if (auto* tup_info = GetStructInfoAs<TupleStructInfoNode>(input)) {
+InsertFreshTuple(curr_idx, tup_info);
+  }
+}
+
+for (const Binding& binding : block->bindings) {
+  Var current_var = binding->var;
+  Expr value = GetBoundValue(binding);
+  alias_map_[current_var] = GetAliasSet(value, current_var);
+}
+
+return {alias_map_, tuple_map_};
+  }
+
+ private:
+  int get_fresh_idx() {
+int ret = mem_idx_;
+mem_idx_++;
+return ret;
+  }
+
+  // Fresh tuple = each element is assumed to be a unique allocation
+  void InsertFreshTuple(int tup_idx, const TupleStructInfoNode* tup_info) {
+std::vector> tuple_set;
+for (int i = 0; i < static_cast<int>(tup_info->fields.size()); i++) {
+  int curr_field = get_fresh_idx();
+  tuple_set.push_back({curr_field});
+  if (auto* nested_tup_info 

Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450846326


##
src/relax/transform/dataflow_inplace.cc:
##

Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450844360


##
src/relax/transform/dataflow_inplace.cc:
##

Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450843319


##
src/relax/transform/dataflow_inplace.cc:
##

Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450841974


##
src/relax/transform/dataflow_inplace.cc:
##

Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450840970


##
tests/python/relax/test_dataflow_inplace.py:
##
@@ -0,0 +1,464 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import List, Set, Tuple
+import tvm
+from tvm import relax, testing
+from tvm.relax.transform import DataflowUseInplaceCalls
+from tvm.relax.testing.transform import (
+dataflow_liveness_analysis,
+dataflow_alias_analysis,
+dataflow_inplace_analysis,
+dataflow_single_inplace_call,
+)
+from tvm.script.parser import ir as I, relax as R, tir as T
+
+import numpy as np
+
+
+def test_liveness_analysis():
+@I.ir_module
+class BasicLiveness:
+@R.function
+def main(x: R.Tensor((), "int32")) -> R.Tensor((), "int32"):
+with R.dataflow():

Review Comment:
   Good question. Yes, the analysis depends on dataflow blocks and makes use of 
the lack of side effects in particular. The transformation won't do anything 
unless the analysis finds eligible operators to rewrite.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450840195


##
src/relax/transform/dataflow_inplace.cc:
##

Re: [PR] [Relay][Frontend][Torch] fix a typo mistake in nonzero_numpy [tvm]

2024-01-12 Thread via GitHub


masahi merged PR #16390:
URL: https://github.com/apache/tvm/pull/16390


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



(tvm) branch main updated: [Relay][Frontend][Torch] fix a typo mistake in nonzero_numpy (#16390)

2024-01-12 Thread masahi
This is an automated email from the ASF dual-hosted git repository.

masahi pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tvm.git


The following commit(s) were added to refs/heads/main by this push:
 new 196b413813 [Relay][Frontend][Torch] fix a typo mistake in 
nonzero_numpy (#16390)
196b413813 is described below

commit 196b413813ea6b5e85720118c9aea1fe043a81fb
Author: TaoMiao 
AuthorDate: Sat Jan 13 03:22:18 2024 +0800

[Relay][Frontend][Torch] fix a typo mistake in nonzero_numpy (#16390)

fix a typo mistake in pytorch frontend nonzero_numpy
---
 python/tvm/relay/frontend/pytorch.py  | 2 +-
 tests/python/frontend/pytorch/test_forward.py | 1 +
 2 files changed, 2 insertions(+), 1 deletion(-)

diff --git a/python/tvm/relay/frontend/pytorch.py 
b/python/tvm/relay/frontend/pytorch.py
index 0213dcc488..b9650e6e9a 100644
--- a/python/tvm/relay/frontend/pytorch.py
+++ b/python/tvm/relay/frontend/pytorch.py
@@ -2680,7 +2680,7 @@ class PyTorchOpConverter:
 return ret
 
 def nonzero_numpy(self, inputs, input_types):
-return self.nonzero(inputs, input_types, is_numpy_style=False)
+return self.nonzero(inputs, input_types, is_numpy_style=True)
 
 def scatter(self, inputs, input_types):
 assert len(inputs) == 4 or len(inputs) == 5, (
diff --git a/tests/python/frontend/pytorch/test_forward.py 
b/tests/python/frontend/pytorch/test_forward.py
index 6178a58b6d..9bf40cfcdd 100644
--- a/tests/python/frontend/pytorch/test_forward.py
+++ b/tests/python/frontend/pytorch/test_forward.py
@@ -4445,6 +4445,7 @@ def test_forward_nonzero():
 
 inp = torch.Tensor(np.array([[0, 1, 0], [2, 0, 9], [-1, -1, 
0]]).astype("float32"))
 verify_trace_model(Nonzero(), [inp], ["llvm"])
+verify_trace_model(Nonzero(as_tuple=True), [inp], ["llvm"])
 
 
 def test_forward_scatter():
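
For context on what the new test line covers: torch.nonzero(..., as_tuple=True) is the
"numpy-style" form that the fixed converter now maps to is_numpy_style=True; it returns
one index tensor per dimension instead of a single (N, ndim) tensor. A quick illustration
(plain PyTorch, independent of TVM):

import torch

x = torch.tensor([[0, 1, 0], [2, 0, 9]])
print(torch.nonzero(x))
# tensor([[0, 1],
#         [1, 0],
#         [1, 2]])
print(torch.nonzero(x, as_tuple=True))
# (tensor([0, 1, 1]), tensor([1, 0, 2]))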



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450830056


##
tests/python/relax/test_dataflow_inplace.py:
##
@@ -0,0 +1,464 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import List, Set, Tuple
+import tvm
+from tvm import relax, testing
+from tvm.relax.transform import DataflowUseInplaceCalls
+from tvm.relax.testing.transform import (
+dataflow_liveness_analysis,
+dataflow_alias_analysis,
+dataflow_inplace_analysis,
+dataflow_single_inplace_call,
+)
+from tvm.script.parser import ir as I, relax as R, tir as T
+
+import numpy as np
+
+
+def test_liveness_analysis():
+@I.ir_module
+class BasicLiveness:
+@R.function
+def main(x: R.Tensor((), "int32")) -> R.Tensor((), "int32"):
+with R.dataflow():
+y = R.const(1, dtype="int32")
+z = R.add(x, y)
+q = R.multiply(z, y)
+p = R.add(z, q)
+n = R.multiply(p, p)
+R.output(n)
+return n
+
+block = BasicLiveness["main"].body.blocks[0]
+live_ranges = dataflow_liveness_analysis(block)
+expected_ranges = {
+"x": (-1, 1),
+"y": (0, 2),
+"z": (1, 3),
+"q": (2, 3),
+"p": (3, 4),
+"n": (4, 5),
+}
+for var, live_range in live_ranges.items():

Review Comment:
   Good observation.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org
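
To make the liveness intervals discussed above concrete, here is a minimal sketch of
invoking the liveness hook on a dataflow block. The import path and function name are
the ones exposed by this PR's testing helpers, so this only runs with the PR applied;
the module itself is just an example.

import tvm
from tvm.relax.testing.transform import dataflow_liveness_analysis
from tvm.script.parser import ir as I, relax as R

@I.ir_module
class Mod:
    @R.function
    def main(x: R.Tensor((), "int32")) -> R.Tensor((), "int32"):
        with R.dataflow():
            # y is live from its definition to its last use inside the block
            y = R.add(x, x)
            # z is the block output, so its range extends past the last binding
            z = R.multiply(y, y)
            R.output(z)
        return z

block = Mod["main"].body.blocks[0]
# Returns {Var: (start_index, end_index)}; a start of -1 means "defined before the
# block", an end of len(bindings) means "still live after the block ends".
ranges = dataflow_liveness_analysis(block)
for var, rng in ranges.items():
    print(var.name_hint, rng)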



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450829900


##
python/tvm/relax/testing/transform.py:
##
@@ -128,3 +129,62 @@ def transform(self):
 def ApplyEmptyCppMutator() -> tvm.ir.transform.Pass:
 packed_func = 
tvm.get_global_func("relax.testing.transform.ApplyEmptyCppMutator")
 return packed_func()
+
+
+def dataflow_liveness_analysis(block: DataflowBlock) -> Dict[Var, Tuple[int, 
int]]:
+"""
+Inner function for the dataflow inplace transformation exposed for testing.
+"""
+live_ranges = 
tvm.get_global_func("relax.testing.transform.DataflowLivenessAnalysis")(

Review Comment:
   Good idea, that's a handy feature.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



Re: [PR] [Unity][Transform] Replace eligible operators with in-place versions in dataflow blocks [tvm]

2024-01-12 Thread via GitHub


slyubomirsky commented on code in PR #16129:
URL: https://github.com/apache/tvm/pull/16129#discussion_r1450829603


##
include/tvm/relax/transform.h:
##
@@ -526,6 +526,15 @@ TVM_DLL Pass ConvertLayout(Map> 
desired_layouts);
  */
 TVM_DLL Pass DeadCodeElimination(Array entry_functions);
 
+/*!
+ * \brief Pass that changes calls to supported operators in dataflow blocks 
into in-place
+ * implementations. Supported operators will be replaced by calls to 
`call_tir_inplace` that invoke

Review Comment:
   No, it refers to supported elementwise ops. We could, in principle, analyze 
PrimFuncs to see if they can be made in-place (I haven't thought about what 
that would entail) but that's not what's being done here. I'll clarify the 
comment.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org
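
To make the comment above concrete, a rough sketch of applying the pass. This assumes
the DataflowUseInplaceCalls name used in the PR's tests; the pass exists only with this
PR applied, and whether a given call is rewritten depends on the liveness and alias
analyses rather than being guaranteed.

import tvm
from tvm import relax
from tvm.script.parser import ir as I, relax as R

@I.ir_module
class Module:
    @R.function
    def main(x: R.Tensor((2, 3), "float32"),
             y: R.Tensor((2, 3), "float32")) -> R.Tensor((2, 3), "float32"):
        with R.dataflow():
            # z is an intermediate whose last use is the multiply below, so an
            # in-place elementwise implementation may reuse its buffer for w.
            z = R.add(x, y)
            w = R.multiply(z, y)
            R.output(w)
        return w

# Rewrites eligible elementwise calls inside dataflow blocks into
# call_tir_inplace-based implementations; ineligible calls are left unchanged.
mod = relax.transform.DataflowUseInplaceCalls()(Module)
mod.show()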



(tvm) branch main updated: [RUNTIME][RPC] Enable RPCObjectRef return in RPC (#16387)

2024-01-12 Thread syfeng
This is an automated email from the ASF dual-hosted git repository.

syfeng pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/tvm.git


The following commit(s) were added to refs/heads/main by this push:
 new 4258c864b9 [RUNTIME][RPC] Enable RPCObjectRef return in RPC (#16387)
4258c864b9 is described below

commit 4258c864b91f1b0b5cffc5ba792a331998f793bd
Author: Tianqi Chen 
AuthorDate: Fri Jan 12 11:53:19 2024 -0500

[RUNTIME][RPC] Enable RPCObjectRef return in RPC (#16387)

[Runtime] Enable RPCObjectRef return in RPC

This PR enables returning RPCObjectRef objects over RPC, similar to the disco
transportation layer.
This allows us to do advanced remote debugging when the remote VM requires
advanced object inputs such as the KV cache and shapes.

To keep the implementation compatible with minRPC (used in some of the more
limited protocols), for now we only support RPCObjectRef and do not enable
unpacking Shape and String.
---
 include/tvm/runtime/object.h |  4 ++-
 src/runtime/minrpc/minrpc_server.h   | 15 --
 src/runtime/minrpc/rpc_reference.h   |  8 +
 src/runtime/rpc/rpc_endpoint.cc  | 51 +++-
 src/runtime/rpc/rpc_local_session.cc | 20 +++--
 src/runtime/rpc/rpc_module.cc|  7 +
 src/runtime/rpc/rpc_session.h| 51 +++-
 tests/python/runtime/test_runtime_rpc.py | 31 +++
 8 files changed, 174 insertions(+), 13 deletions(-)

diff --git a/include/tvm/runtime/object.h b/include/tvm/runtime/object.h
index 94644d797c..92f477b058 100644
--- a/include/tvm/runtime/object.h
+++ b/include/tvm/runtime/object.h
@@ -72,8 +72,10 @@ struct TypeIndex {
 kRuntimeShapeTuple = 6,
 /*! \brief runtime::PackedFunc. */
 kRuntimePackedFunc = 7,
-/*! \brief runtime::DRef */
+/*! \brief runtime::DRef for disco distributed runtime */
 kRuntimeDiscoDRef = 8,
+/*! \brief runtime::RPCObjectRef */
+kRuntimeRPCObjectRef = 9,
 // static assignments that may subject to change.
 kRuntimeClosure,
 kRuntimeADT,
diff --git a/src/runtime/minrpc/minrpc_server.h 
b/src/runtime/minrpc/minrpc_server.h
index cca47f80b9..96a4dbce79 100644
--- a/src/runtime/minrpc/minrpc_server.h
+++ b/src/runtime/minrpc/minrpc_server.h
@@ -206,7 +206,8 @@ class MinRPCExecute : public MinRPCExecInterface {
 ret_tcode[1] = kTVMBytes;
 ret_handler_->ReturnPackedSeq(ret_value, ret_tcode, 2);
 
 TVMByteArrayFree(reinterpret_cast<TVMByteArray*>(ret_value[1].v_handle));  // NOLINT(*)
-  } else if (rv_tcode == kTVMPackedFuncHandle || rv_tcode == 
kTVMModuleHandle) {
+  } else if (rv_tcode == kTVMPackedFuncHandle || rv_tcode == 
kTVMModuleHandle ||
+ rv_tcode == kTVMObjectHandle) {
 ret_tcode[1] = kTVMOpaqueHandle;
 ret_handler_->ReturnPackedSeq(ret_value, ret_tcode, 2);
   } else {
@@ -755,7 +756,17 @@ class MinRPCServer {
   }
 
   void ReadObject(int* tcode, TVMValue* value) {
-this->ThrowError(RPCServerStatus::kUnknownTypeCode);
+// handles RPCObject in minRPC
+// NOTE: object needs to be supported by C runtime
+// because minrpc's restriction of C only
+// we only handle RPCObjectRef
+uint32_t type_index;
+Read(&type_index);
+MINRPC_CHECK(type_index == kRuntimeRPCObjectRefTypeIndex);
+uint64_t object_handle;
+Read(&object_handle);
+tcode[0] = kTVMObjectHandle;
+value[0].v_handle = reinterpret_cast(object_handle);
   }
 
  private:
diff --git a/src/runtime/minrpc/rpc_reference.h 
b/src/runtime/minrpc/rpc_reference.h
index e16f09cb9d..732b017e44 100644
--- a/src/runtime/minrpc/rpc_reference.h
+++ b/src/runtime/minrpc/rpc_reference.h
@@ -33,6 +33,14 @@ class Object;
 /*! \brief The current RPC procotol version. */
 constexpr const char* kRPCProtocolVer = "0.8.0";
 
+/*!
+ * \brief type index of kRuntimeRPCObjectRefTypeIndex
+ * \note this needs to be kept consistent with runtime/object.h
+ * but we explicitly declare it here because minrpc needs to be minimum dep
+ * only c C API
+ */
+constexpr const int kRuntimeRPCObjectRefTypeIndex = 9;
+
 // When tvm.rpc.server.GetCRTMaxPacketSize global function is not registered.
 const uint64_t kRPCMaxTransferSizeBytesDefault = UINT64_MAX;
 
diff --git a/src/runtime/rpc/rpc_endpoint.cc b/src/runtime/rpc/rpc_endpoint.cc
index f2c09132fc..2c431cdb64 100644
--- a/src/runtime/rpc/rpc_endpoint.cc
+++ b/src/runtime/rpc/rpc_endpoint.cc
@@ -175,8 +175,11 @@ class RPCEndpoint::EventHandler : public dmlc::Stream {
 for (int i = 0; i < num_args; ++i) {
   int tcode = type_codes[i];
   if (tcode == kTVMObjectHandle || tcode == kTVMObjectRValueRefArg) {
-LOG(FATAL) << "ValueError: Cannot pass argument " << i << ", type "
-   << args[i].AsObjectRef()->GetTypeKey() << " is 
not supported by RPC";
+if (!args[i].IsObjectRef<RPCObjectRef>()) {
+  LOG(FATAL) << "ValueError: Cannot pass ar

Re: [PR] [RUNTIME][RPC] Enable RPCObjectRef return in RPC [tvm]

2024-01-12 Thread via GitHub


Hzfengsy merged PR #16387:
URL: https://github.com/apache/tvm/pull/16387


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



(tvm) branch unity updated: [Unity][BlockBuilder] Restore bb.get() (#16378)

2024-01-12 Thread tqchen
This is an automated email from the ASF dual-hosted git repository.

tqchen pushed a commit to branch unity
in repository https://gitbox.apache.org/repos/asf/tvm.git


The following commit(s) were added to refs/heads/unity by this push:
 new 138cb651e0 [Unity][BlockBuilder] Restore bb.get() (#16378)
138cb651e0 is described below

commit 138cb651e01552544c9fce60ea1e7f7d4db08349
Author: Yixin Dong 
AuthorDate: Fri Jan 12 22:23:29 2024 +0800

[Unity][BlockBuilder] Restore bb.get() (#16378)

* finalize

* fix ci
---
 include/tvm/relax/block_builder.h|  6 +-
 python/tvm/relax/block_builder.py| 13 ++---
 tests/python/relax/test_blockbuilder_core.py | 12 
 3 files changed, 19 insertions(+), 12 deletions(-)

diff --git a/include/tvm/relax/block_builder.h 
b/include/tvm/relax/block_builder.h
index 4272b3f75e..a1e5a6bc31 100644
--- a/include/tvm/relax/block_builder.h
+++ b/include/tvm/relax/block_builder.h
@@ -87,7 +87,11 @@ class BlockBuilderNode : public Object {
* GlobalVars in the IRModule to ensure name uniqueness and the invariant:
* every public function has the same name as its "global_symbol" attribute.
*
-   * \return The IRModule in this BlockBuilder.
+   * \note this method should be called only once at the end of the building 
process, since it may
+   * invalidate global vars previously returned by this builder. See also
+   * transform::NormalizeGlobalVar.
+   *
+   * \return The result IRModule.
*/
   virtual IRModule Finalize() = 0;
 
diff --git a/python/tvm/relax/block_builder.py 
b/python/tvm/relax/block_builder.py
index 0a408578df..b4206f76f4 100644
--- a/python/tvm/relax/block_builder.py
+++ b/python/tvm/relax/block_builder.py
@@ -22,7 +22,6 @@ from typing import Any, Callable, Dict, List, Optional, 
Sequence, Union
 import tvm
 from tvm import relax as rx
 from tvm import tir
-from tvm.ir.base import deprecated
 from tvm.ir.module import IRModule
 from tvm.runtime import Object
 
@@ -654,16 +653,16 @@ class BlockBuilder(Object):
 """
 return _ffi_api.BlockBuilderNormalize(self, expr)  # type: ignore
 
-@deprecated("tvm.relax.BlockBuilder.get", 
"tvm.relax.BlockBuilder.finalize")
 def get(self) -> tvm.IRModule:
-"""Return the IRModule being built.
+"""Return intermediate IRModule. For the situation where the IRModule 
is needed in the
+middle of a building process.
 
 Returns
 ---
 ret : tvm.IRModule
 An IRModule with Relax and TIR functions being built.
 """
-return self.finalize()
+return _ffi_api.BlockBuilderGetContextIRModule(self)  # type: ignore
 
 def finalize(self) -> tvm.IRModule:
 """Finalize the building process and return the result IRModule.
@@ -671,9 +670,9 @@ class BlockBuilder(Object):
 Possibly rename GlobalVars in the IRModule to ensure name uniqueness 
and the invariant:
 every public function has the same name as its "global_symbol" 
attribute.
 
-Note this call may invalidate global vars previously returned by this 
builder
-(see tvm.relax.transform.NormalizeGlobalVar), so it can only be called 
once at the end of
-the building process.
+Note this method should be called only once at the end of the building 
process, since it may
+invalidate global vars previously returned by this builder.
+See also tvm.relax.transform.NormalizeGlobalVar.
 
 Returns
 ---
diff --git a/tests/python/relax/test_blockbuilder_core.py 
b/tests/python/relax/test_blockbuilder_core.py
index 255ef08560..16023c9c91 100644
--- a/tests/python/relax/test_blockbuilder_core.py
+++ b/tests/python/relax/test_blockbuilder_core.py
@@ -707,8 +707,10 @@ def test_finalize_public_private_name_conflict():
 gv1 = bb.emit_te(te_one, primfunc_name_hint="func")
 bb.emit_func_output((gv0, gv1))
 
-mod = bb.finalize()
-assert rx.analysis.well_formed(mod)
+mod = bb.get()
+assert not rx.analysis.well_formed(mod)
+mod_final = bb.finalize()
+assert rx.analysis.well_formed(mod_final)
 
 # relax function call
 bb = rx.BlockBuilder()
@@ -724,8 +726,10 @@ def test_finalize_public_private_name_conflict():
 gv0 = bb.emit(rx.Call(gvar1, []))
 bb.emit_func_output(gv0)
 
-mod = bb.finalize()
-assert rx.analysis.well_formed(mod)
+mod = bb.get()
+assert not rx.analysis.well_formed(mod)
+mod_final = bb.finalize()
+assert rx.analysis.well_formed(mod_final)
 
 
 def test_emit_nested_seqexpr_in_binding_block():
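
A small usage sketch of the get()/finalize() distinction restored above. The
BlockBuilder calls follow the docstrings in this diff; the module being built is
just an example.

import tvm
from tvm import relax

bb = relax.BlockBuilder()
x = relax.Var("x", relax.TensorStructInfo((4,), "float32"))
with bb.function("main", [x]):
    lv = bb.emit(relax.op.add(x, x))
    bb.emit_func_output(lv)

# get(): intermediate IRModule, usable in the middle of a building process
# (it may not yet satisfy the public/private naming invariant).
intermediate = bb.get()

# finalize(): call once at the end; it may rename GlobalVars, so global vars
# obtained earlier from the builder may be invalidated afterwards.
final_mod = bb.finalize()
final_mod.show()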



Re: [PR] [Unity][BlockBuilder] Restore bb.get() [tvm]

2024-01-12 Thread via GitHub


tqchen merged PR #16378:
URL: https://github.com/apache/tvm/pull/16378


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



Re: [PR] [Unity][Transform] Implement relax.transform.ExpandMatmulOfSum [tvm]

2024-01-12 Thread via GitHub


Lunderberg commented on PR #16313:
URL: https://github.com/apache/tvm/pull/16313#issuecomment-1889336494

   Yup.  I'd see this transformation as a tool in a toolbox, to be used in 
optimization pipelines as needed, but not something to be part of a general 
flow without further testing.


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



(tvm) branch unity updated (4c7c010513 -> 7798e93529)

2024-01-12 Thread tqchen
This is an automated email from the ASF dual-hosted git repository.

tqchen pushed a change to branch unity
in repository https://gitbox.apache.org/repos/asf/tvm.git


from 4c7c010513 [Unity][Transform] Implement 
relax.transform.AdjustMatmulOrder (#16314)
 add 7798e93529 [Unity] Support TIR kernel for PagedKVCache (#16374)

No new revisions were added by this update.

Summary of changes:
 src/runtime/relax_vm/paged_kv_cache.cc |  116 ++-
 ...est_runtime_builtin_paged_attention_kv_cache.py |   15 +-
 ...runtime_builtin_paged_attention_kv_cache_tir.py | 1066 
 3 files changed, 1157 insertions(+), 40 deletions(-)
 create mode 100644 
tests/python/relax/test_runtime_builtin_paged_attention_kv_cache_tir.py



Re: [PR] [Unity] Support TIR kernel for PagedKVCache [tvm]

2024-01-12 Thread via GitHub


tqchen merged PR #16374:
URL: https://github.com/apache/tvm/pull/16374


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



[PR] [Relax][Frontend][ONNX]fix onnx frontend parse [tvm]

2024-01-12 Thread via GitHub


chengven027-intellif opened a new pull request, #16395:
URL: https://github.com/apache/tvm/pull/16395

   _parse_graph_initializers should be called before _parse_graph_input,
   because the conditional statement in _parse_graph_input relies on
   self._nodes, which is populated while parsing the initializers; otherwise
   self._nodes will still be empty at that point. A sketch of the intended
   ordering follows below.
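
   A rough sketch of that ordering (the wrapper method name below is hypothetical;
   only _parse_graph_initializers, _parse_graph_input and self._nodes come from the
   description above):

   class ONNXGraphImporter:
       def _construct_nodes(self, graph):  # hypothetical wrapper name
           # Populate self._nodes from the graph initializers first ...
           self._parse_graph_initializers(graph)
           # ... so that the conditional checks on self._nodes inside
           # _parse_graph_input see the initializers rather than an empty dict.
           self._parse_graph_input(graph)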


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



(tvm) branch echuraev/fix_cmake_issue_for_opencl_tests created (now 4eb9d880fd)

2024-01-12 Thread echuraev
This is an automated email from the ASF dual-hosted git repository.

echuraev pushed a change to branch echuraev/fix_cmake_issue_for_opencl_tests
in repository https://gitbox.apache.org/repos/asf/tvm.git


  at 4eb9d880fd [OpenCL] Fix OpenCL tests compilation

This branch includes the following new commits:

 new 4eb9d880fd [OpenCL] Fix OpenCL tests compilation

The 1 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.




(tvm) 01/01: [OpenCL] Fix OpenCL tests compilation

2024-01-12 Thread echuraev
This is an automated email from the ASF dual-hosted git repository.

echuraev pushed a commit to branch echuraev/fix_cmake_issue_for_opencl_tests
in repository https://gitbox.apache.org/repos/asf/tvm.git

commit 4eb9d880fd049730f452a651419bf73b7abdab1b
Author: Egor Churaev 
AuthorDate: Fri Jan 12 14:56:10 2024 +0300

[OpenCL] Fix OpenCL tests compilation

Found a problem: when TVM is built with OpenCL tests enabled from within a
different cmake project (not TVM itself), `CMAKE_SOURCE_DIR` returns the path
to the `CMakeLists.txt` of the current project (not TVM's), and in this case
we see the following error: `No SOURCES given to target: opencl-cpptest`.

To be consistent with the code style in `OpenCL.cmake`, I removed the usage of
the `CMAKE_SOURCE_DIR` variable. This also fixes the issue when the TVM cmake
is invoked from a directory with another cmake project.
---
 cmake/modules/OpenCL.cmake | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/cmake/modules/OpenCL.cmake b/cmake/modules/OpenCL.cmake
index 2dc1fc18f3..ddcd1e4190 100644
--- a/cmake/modules/OpenCL.cmake
+++ b/cmake/modules/OpenCL.cmake
@@ -81,7 +81,7 @@ if(USE_OPENCL)
 if(Build_OpenCL_GTests)
 message(STATUS "Building OpenCL-Gtests")
 tvm_file_glob(GLOB_RECURSE OPENCL_TEST_SRCS
-  "${CMAKE_SOURCE_DIR}/tests/cpp-runtime/opencl/*.cc"
+  "tests/cpp-runtime/opencl/*.cc"
 )
 add_executable(opencl-cpptest ${OPENCL_TEST_SRCS})
 target_link_libraries(opencl-cpptest PRIVATE gtest_main tvm_runtime)



Re: [PR] [Relay][Frontend][Torch] fix a typo mistake in nonzero_numpy [tvm]

2024-01-12 Thread via GitHub


t-vi commented on code in PR #16390:
URL: https://github.com/apache/tvm/pull/16390#discussion_r1450209720


##
tests/python/frontend/pytorch/test_nonzero_numpy.py:
##
@@ -0,0 +1,49 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# pylint: disable=import-self, too-many-lines, len-as-condition, 
no-else-return, unused-variable, too-many-nested-blocks
+# pylint: disable=consider-iterating-dictionary, invalid-name, 
unused-argument, unused-variable, broad-except
+# pylint: disable=import-outside-toplevel, simplifiable-if-expression, 
cell-var-from-loop, unnecessary-lambda
+# pylint: disable=missing-function-docstring, redefined-builtin, 
use-implicit-booleaness-not-comparison
+"""Tests to ensure nonzero_numpy are correctly"""
+import torch
+from torch import nn
+
+import tvm
+
+
+class NonZeroModule(nn.Module):
+"""Module that performs nonzero"""
+
+def __init__(self):
+super().__init__()
+
+def forward(self, x, mask):
+mask_index = torch.nonzero(mask, as_tuple=True)
+x[mask_index] = torch.ones_like(x[mask_index])
+return x
+
+
+def test_pytorch_nonzero():
+model = NonZeroModule()
+x = torch.zeros((2, 10), dtype=torch.float32)
+mask = torch.randint(0, 2, (2, 10)).bool()
+with torch.no_grad():
+traced_torch_model = torch.jit.trace(model, (x, mask))
+import_input = [("input0", (2, 10)), ("input1", (2, 10))]
+relay_model_ir, relay_model_params = tvm.relay.frontend.from_pytorch(
+traced_torch_model, import_input
+)

Review Comment:
   Thank you,



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



Re: [I] [VOTE] Transition Main to Unity [tvm]

2024-01-12 Thread via GitHub


shifeiwen commented on issue #16368:
URL: https://github.com/apache/tvm/issues/16368#issuecomment-1888797721

   +1


-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



Re: [PR] [Relay][Frontend][Torch] fix a typo mistake in nonzero_numpy [tvm]

2024-01-12 Thread via GitHub


taomiao commented on code in PR #16390:
URL: https://github.com/apache/tvm/pull/16390#discussion_r1450136577


##
tests/python/frontend/pytorch/test_nonzero_numpy.py:
##
@@ -0,0 +1,49 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# pylint: disable=import-self, too-many-lines, len-as-condition, no-else-return, unused-variable, too-many-nested-blocks
+# pylint: disable=consider-iterating-dictionary, invalid-name, unused-argument, unused-variable, broad-except
+# pylint: disable=import-outside-toplevel, simplifiable-if-expression, cell-var-from-loop, unnecessary-lambda
+# pylint: disable=missing-function-docstring, redefined-builtin, use-implicit-booleaness-not-comparison
+"""Tests to ensure nonzero_numpy works correctly."""
+import torch
+from torch import nn
+
+import tvm
+
+
+class NonZeroModule(nn.Module):
+    """Module that performs nonzero"""
+
+    def __init__(self):
+        super().__init__()
+
+    def forward(self, x, mask):
+        mask_index = torch.nonzero(mask, as_tuple=True)
+        x[mask_index] = torch.ones_like(x[mask_index])
+        return x
+
+
+def test_pytorch_nonzero():
+    model = NonZeroModule()
+    x = torch.zeros((2, 10), dtype=torch.float32)
+    mask = torch.randint(0, 2, (2, 10)).bool()
+    with torch.no_grad():
+        traced_torch_model = torch.jit.trace(model, (x, mask))
+    import_input = [("input0", (2, 10)), ("input1", (2, 10))]
+    relay_model_ir, relay_model_params = tvm.relay.frontend.from_pytorch(
+        traced_torch_model, import_input
+    )

Review Comment:
   I see there is a test "test_forward_nonzero" (test_forward.py:4435). I added a line there.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org



Re: [PR] [Relay][Frontend][Torch] fix a typo mistake in nonzero_numpy [tvm]

2024-01-12 Thread via GitHub


taomiao commented on code in PR #16390:
URL: https://github.com/apache/tvm/pull/16390#discussion_r1450107336


##
tests/python/frontend/pytorch/test_nonzero_numpy.py:
##
@@ -0,0 +1,49 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# pylint: disable=import-self, too-many-lines, len-as-condition, no-else-return, unused-variable, too-many-nested-blocks
+# pylint: disable=consider-iterating-dictionary, invalid-name, unused-argument, unused-variable, broad-except
+# pylint: disable=import-outside-toplevel, simplifiable-if-expression, cell-var-from-loop, unnecessary-lambda
+# pylint: disable=missing-function-docstring, redefined-builtin, use-implicit-booleaness-not-comparison
+"""Tests to ensure nonzero_numpy works correctly."""
+import torch
+from torch import nn
+
+import tvm
+
+
+class NonZeroModule(nn.Module):
+    """Module that performs nonzero"""
+
+    def __init__(self):
+        super().__init__()
+
+    def forward(self, x, mask):
+        mask_index = torch.nonzero(mask, as_tuple=True)
+        x[mask_index] = torch.ones_like(x[mask_index])
+        return x
+
+
+def test_pytorch_nonzero():
+    model = NonZeroModule()
+    x = torch.zeros((2, 10), dtype=torch.float32)
+    mask = torch.randint(0, 2, (2, 10)).bool()
+    with torch.no_grad():
+        traced_torch_model = torch.jit.trace(model, (x, mask))
+    import_input = [("input0", (2, 10)), ("input1", (2, 10))]
+    relay_model_ir, relay_model_params = tvm.relay.frontend.from_pytorch(
+        traced_torch_model, import_input
+    )

Review Comment:
   ok



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscr...@tvm.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org