[incubator-mxnet] 42/42: Fix build failure

2019-07-26 Thread haoj
This is an automated email from the ASF dual-hosted git repository.

haoj pushed a commit to branch numpy
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git

commit 47f4cd39a279d2c14d4b590d64fc0099695b7c1e
Author: reminisce 
AuthorDate: Wed Jul 17 13:14:27 2019 +0800

Fix build failure
---
 python/mxnet/gluon/loss.py  |  2 -
 python/mxnet/gluon/nn/basic_layers.py   |  4 +-
 python/mxnet/gluon/utils.py | 52 +---
 python/mxnet/numpy_extension/__init__.py|  5 +-
 python/mxnet/test_utils.py  |  1 +
 src/operator/numpy/np_init_op.cc|  2 +-
 src/operator/numpy/np_init_op.cu|  2 +-
 tests/python/unittest/test_contrib_amp.py   | 86 --
 tests/python/unittest/test_numpy_gluon.py   |  7 ++-
 tests/python/unittest/test_numpy_ndarray.py | 24 
 tests/python/unittest/test_numpy_op.py  | 94 ++---
 11 files changed, 66 insertions(+), 213 deletions(-)

diff --git a/python/mxnet/gluon/loss.py b/python/mxnet/gluon/loss.py
index d634e79..d2e2344 100644
--- a/python/mxnet/gluon/loss.py
+++ b/python/mxnet/gluon/loss.py
@@ -29,7 +29,6 @@ import numpy as np
 from .. import ndarray
 from ..base import numeric_types
 from .block import HybridBlock
-from .utils import _adapt_np_array
 from ..util import is_np_array
 
 
@@ -188,7 +187,6 @@ class L1Loss(Loss):
 def __init__(self, weight=None, batch_axis=0, **kwargs):
 super(L1Loss, self).__init__(weight, batch_axis, **kwargs)
 
-@_adapt_np_array
 def hybrid_forward(self, F, pred, label, sample_weight=None):
 label = _reshape_like(F, label, pred)
 loss = F.abs(label - pred)
diff --git a/python/mxnet/gluon/nn/basic_layers.py b/python/mxnet/gluon/nn/basic_layers.py
index d7f599d..fb0b62e 100644
--- a/python/mxnet/gluon/nn/basic_layers.py
+++ b/python/mxnet/gluon/nn/basic_layers.py
@@ -26,7 +26,7 @@ import numpy as np
 
 from .activations import Activation
 from ..block import Block, HybridBlock
-from ..utils import _indent, _adapt_np_array
+from ..utils import _indent
 from ... import nd, sym
 from ...util import is_np_array
 
@@ -522,7 +522,6 @@ class InstanceNorm(HybridBlock):
 shape=(in_channels,), 
init=beta_initializer,
 allow_deferred_init=True)
 
-@_adapt_np_array
 def hybrid_forward(self, F, x, gamma, beta):
 if self._axis == 1:
 return F.InstanceNorm(x, gamma, beta,
@@ -795,7 +794,6 @@ class HybridLambda(HybridBlock):
 "Unrecognized function in lambda: {} of type {}"
 .format(function, type(function)))
 
-@_adapt_np_array
 def hybrid_forward(self, F, x, *args):
 return self._func(F, x, *args)
 
diff --git a/python/mxnet/gluon/utils.py b/python/mxnet/gluon/utils.py
index bf5d43b..c79b5e3 100644
--- a/python/mxnet/gluon/utils.py
+++ b/python/mxnet/gluon/utils.py
@@ -40,7 +40,7 @@ except ImportError:
 import numpy as np
 
 from .. import ndarray
-from ..util import is_np_shape, is_np_array, wraps_safely
+from ..util import is_np_shape, is_np_array
 from .. import numpy as _mx_np  # pylint: disable=reimported
 
 
@@ -484,53 +484,3 @@ def _check_all_np_ndarrays(out):
 for i in out:
 _check_all_np_ndarrays(i)
 # pylint: enable=no-else-raise
-
-
-def _to_classic_arrays(*args, **kwargs):
-"""Convert arrays to classic arrays. This is used in a Gluon layer for converting
-inputs of np arrays to classic arrays so that the layer built with legacy ops can still
-be used in np_array semantics."""
-from ..numpy import ndarray as np_ndarray
-from ..symbol.numpy import _Symbol as np_symbol
-num_inputs = len(args)
-assert num_inputs != 0
-if not is_np_array():
-return args, kwargs
-in_arrs = [arr if arr is None else arr.as_nd_ndarray() for arr in args]
-new_kwargs = {}
-for k, v in kwargs.items():
-if isinstance(v, (np_ndarray, np_symbol)):
-new_kwargs[k] = v.as_nd_ndarray()
-else:
-new_kwargs[k] = v
-return in_arrs, new_kwargs
-
-
-def _to_np_arrays(*args):
-"""Convert arrays to np arrays. This is used in a Gluon layer for converting
-outputs of classic arrays to np arrays so that the layer built with legacy ops can still
-be used in np_array semantics."""
-num_outputs = len(args)
-assert num_outputs != 0
-if not is_np_array():
-return args[0] if num_outputs == 1 else args
-out = [arr.as_np_ndarray() for arr in args]
-return out[0] if num_outputs == 1 else out
-
-
-# TODO(junwu): This is a temp solution for allowing basic layers
-# implemented using legacy ops to accept np.ndarrays as inputs and return
-# np.ndarrays as outputs. We should remove it after changing all the layers
-# to use np ops in np_array semantics in the future.
-def _adapt_np_array(func):
-@wraps_safely(func)

[incubator-mxnet] 42/42: Fix build failure

2019-07-24 Thread haoj
This is an automated email from the ASF dual-hosted git repository.

haoj pushed a commit to branch numpy
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git

commit 07d7a946003a1038a7cfc1f35d396bd5d1ec5a29
Author: reminisce 
AuthorDate: Wed Jul 17 13:14:27 2019 +0800

Fix build failure
---
 python/mxnet/gluon/loss.py  |  2 -
 python/mxnet/gluon/nn/basic_layers.py   |  4 +-
 python/mxnet/gluon/utils.py | 52 +---
 python/mxnet/numpy_extension/__init__.py|  5 +-
 python/mxnet/test_utils.py  |  1 +
 src/operator/numpy/np_init_op.cc|  2 +-
 src/operator/numpy/np_init_op.cu|  2 +-
 tests/python/unittest/test_contrib_amp.py   | 86 --
 tests/python/unittest/test_numpy_gluon.py   |  7 ++-
 tests/python/unittest/test_numpy_ndarray.py | 24 
 tests/python/unittest/test_numpy_op.py  | 94 ++---
 11 files changed, 66 insertions(+), 213 deletions(-)

diff --git a/python/mxnet/gluon/loss.py b/python/mxnet/gluon/loss.py
index d634e79..d2e2344 100644
--- a/python/mxnet/gluon/loss.py
+++ b/python/mxnet/gluon/loss.py
@@ -29,7 +29,6 @@ import numpy as np
 from .. import ndarray
 from ..base import numeric_types
 from .block import HybridBlock
-from .utils import _adapt_np_array
 from ..util import is_np_array
 
 
@@ -188,7 +187,6 @@ class L1Loss(Loss):
 def __init__(self, weight=None, batch_axis=0, **kwargs):
 super(L1Loss, self).__init__(weight, batch_axis, **kwargs)
 
-@_adapt_np_array
 def hybrid_forward(self, F, pred, label, sample_weight=None):
 label = _reshape_like(F, label, pred)
 loss = F.abs(label - pred)
diff --git a/python/mxnet/gluon/nn/basic_layers.py b/python/mxnet/gluon/nn/basic_layers.py
index d7f599d..fb0b62e 100644
--- a/python/mxnet/gluon/nn/basic_layers.py
+++ b/python/mxnet/gluon/nn/basic_layers.py
@@ -26,7 +26,7 @@ import numpy as np
 
 from .activations import Activation
 from ..block import Block, HybridBlock
-from ..utils import _indent, _adapt_np_array
+from ..utils import _indent
 from ... import nd, sym
 from ...util import is_np_array
 
@@ -522,7 +522,6 @@ class InstanceNorm(HybridBlock):
 shape=(in_channels,), 
init=beta_initializer,
 allow_deferred_init=True)
 
-@_adapt_np_array
 def hybrid_forward(self, F, x, gamma, beta):
 if self._axis == 1:
 return F.InstanceNorm(x, gamma, beta,
@@ -795,7 +794,6 @@ class HybridLambda(HybridBlock):
 "Unrecognized function in lambda: {} of type {}"
 .format(function, type(function)))
 
-@_adapt_np_array
 def hybrid_forward(self, F, x, *args):
 return self._func(F, x, *args)
 
diff --git a/python/mxnet/gluon/utils.py b/python/mxnet/gluon/utils.py
index bf5d43b..c79b5e3 100644
--- a/python/mxnet/gluon/utils.py
+++ b/python/mxnet/gluon/utils.py
@@ -40,7 +40,7 @@ except ImportError:
 import numpy as np
 
 from .. import ndarray
-from ..util import is_np_shape, is_np_array, wraps_safely
+from ..util import is_np_shape, is_np_array
 from .. import numpy as _mx_np  # pylint: disable=reimported
 
 
@@ -484,53 +484,3 @@ def _check_all_np_ndarrays(out):
 for i in out:
 _check_all_np_ndarrays(i)
 # pylint: enable=no-else-raise
-
-
-def _to_classic_arrays(*args, **kwargs):
-"""Convert arrays to classic arrays. This is used in a Gluon layer for converting
-inputs of np arrays to classic arrays so that the layer built with legacy ops can still
-be used in np_array semantics."""
-from ..numpy import ndarray as np_ndarray
-from ..symbol.numpy import _Symbol as np_symbol
-num_inputs = len(args)
-assert num_inputs != 0
-if not is_np_array():
-return args, kwargs
-in_arrs = [arr if arr is None else arr.as_nd_ndarray() for arr in args]
-new_kwargs = {}
-for k, v in kwargs.items():
-if isinstance(v, (np_ndarray, np_symbol)):
-new_kwargs[k] = v.as_nd_ndarray()
-else:
-new_kwargs[k] = v
-return in_arrs, new_kwargs
-
-
-def _to_np_arrays(*args):
-"""Convert arrays to np arrays. This is used in a Gluon layer for converting
-outputs of classic arrays to np arrays so that the layer built with legacy ops can still
-be used in np_array semantics."""
-num_outputs = len(args)
-assert num_outputs != 0
-if not is_np_array():
-return args[0] if num_outputs == 1 else args
-out = [arr.as_np_ndarray() for arr in args]
-return out[0] if num_outputs == 1 else out
-
-
-# TODO(junwu): This is a temp solution for allowing basic layers
-# implemented using legacy ops to accept np.ndarrays as inputs and return
-# np.ndarrays as outputs. We should remove it after changing all the layers
-# to use np ops in np_array semantics in the future.
-def _adapt_np_array(func):
-@wraps_safely(func)

[incubator-mxnet] 42/42: Fix build failure

2019-07-22 Thread haoj
This is an automated email from the ASF dual-hosted git repository.

haoj pushed a commit to branch numpy
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git

commit a23120e02d5ad3d46e855ee4d044572ed7f0ca55
Author: reminisce 
AuthorDate: Wed Jul 17 13:14:27 2019 +0800

Fix build failure
---
 python/mxnet/gluon/loss.py  |  2 -
 python/mxnet/gluon/nn/basic_layers.py   |  4 +-
 python/mxnet/gluon/utils.py | 52 +---
 python/mxnet/numpy_extension/__init__.py|  5 +-
 python/mxnet/test_utils.py  |  1 +
 src/operator/numpy/np_init_op.cc|  2 +-
 src/operator/numpy/np_init_op.cu|  2 +-
 tests/python/unittest/test_contrib_amp.py   | 86 --
 tests/python/unittest/test_numpy_gluon.py   |  7 ++-
 tests/python/unittest/test_numpy_ndarray.py | 24 
 tests/python/unittest/test_numpy_op.py  | 94 ++---
 11 files changed, 66 insertions(+), 213 deletions(-)

diff --git a/python/mxnet/gluon/loss.py b/python/mxnet/gluon/loss.py
index d634e79..d2e2344 100644
--- a/python/mxnet/gluon/loss.py
+++ b/python/mxnet/gluon/loss.py
@@ -29,7 +29,6 @@ import numpy as np
 from .. import ndarray
 from ..base import numeric_types
 from .block import HybridBlock
-from .utils import _adapt_np_array
 from ..util import is_np_array
 
 
@@ -188,7 +187,6 @@ class L1Loss(Loss):
 def __init__(self, weight=None, batch_axis=0, **kwargs):
 super(L1Loss, self).__init__(weight, batch_axis, **kwargs)
 
-@_adapt_np_array
 def hybrid_forward(self, F, pred, label, sample_weight=None):
 label = _reshape_like(F, label, pred)
 loss = F.abs(label - pred)
diff --git a/python/mxnet/gluon/nn/basic_layers.py b/python/mxnet/gluon/nn/basic_layers.py
index d7f599d..fb0b62e 100644
--- a/python/mxnet/gluon/nn/basic_layers.py
+++ b/python/mxnet/gluon/nn/basic_layers.py
@@ -26,7 +26,7 @@ import numpy as np
 
 from .activations import Activation
 from ..block import Block, HybridBlock
-from ..utils import _indent, _adapt_np_array
+from ..utils import _indent
 from ... import nd, sym
 from ...util import is_np_array
 
@@ -522,7 +522,6 @@ class InstanceNorm(HybridBlock):
 shape=(in_channels,), 
init=beta_initializer,
 allow_deferred_init=True)
 
-@_adapt_np_array
 def hybrid_forward(self, F, x, gamma, beta):
 if self._axis == 1:
 return F.InstanceNorm(x, gamma, beta,
@@ -795,7 +794,6 @@ class HybridLambda(HybridBlock):
 "Unrecognized function in lambda: {} of type {}"
 .format(function, type(function)))
 
-@_adapt_np_array
 def hybrid_forward(self, F, x, *args):
 return self._func(F, x, *args)
 
diff --git a/python/mxnet/gluon/utils.py b/python/mxnet/gluon/utils.py
index bf5d43b..c79b5e3 100644
--- a/python/mxnet/gluon/utils.py
+++ b/python/mxnet/gluon/utils.py
@@ -40,7 +40,7 @@ except ImportError:
 import numpy as np
 
 from .. import ndarray
-from ..util import is_np_shape, is_np_array, wraps_safely
+from ..util import is_np_shape, is_np_array
 from .. import numpy as _mx_np  # pylint: disable=reimported
 
 
@@ -484,53 +484,3 @@ def _check_all_np_ndarrays(out):
 for i in out:
 _check_all_np_ndarrays(i)
 # pylint: enable=no-else-raise
-
-
-def _to_classic_arrays(*args, **kwargs):
-"""Convert arrays to classic arrays. This is used in a Gluon layer for converting
-inputs of np arrays to classic arrays so that the layer built with legacy ops can still
-be used in np_array semantics."""
-from ..numpy import ndarray as np_ndarray
-from ..symbol.numpy import _Symbol as np_symbol
-num_inputs = len(args)
-assert num_inputs != 0
-if not is_np_array():
-return args, kwargs
-in_arrs = [arr if arr is None else arr.as_nd_ndarray() for arr in args]
-new_kwargs = {}
-for k, v in kwargs.items():
-if isinstance(v, (np_ndarray, np_symbol)):
-new_kwargs[k] = v.as_nd_ndarray()
-else:
-new_kwargs[k] = v
-return in_arrs, new_kwargs
-
-
-def _to_np_arrays(*args):
-"""Convert arrays to np arrays. This is used in a Gluon layer for converting
-outputs of classic arrays to np arrays so that the layer built with legacy ops can still
-be used in np_array semantics."""
-num_outputs = len(args)
-assert num_outputs != 0
-if not is_np_array():
-return args[0] if num_outputs == 1 else args
-out = [arr.as_np_ndarray() for arr in args]
-return out[0] if num_outputs == 1 else out
-
-
-# TODO(junwu): This is a temp solution for allowing basic layers
-# implemented using legacy ops to accept np.ndarrays as inputs and return
-# np.ndarrays as outputs. We should remove it after changing all the layers
-# to use np ops in np_array semantics in the future.
-def _adapt_np_array(func):
-@wraps_safely(func)

[incubator-mxnet] 42/42: Fix build failure

2019-07-18 Thread haoj
This is an automated email from the ASF dual-hosted git repository.

haoj pushed a commit to branch numpy
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git

commit 10a03fa3d9a05a3166e2e35010e70120f6b8a1f5
Author: reminisce 
AuthorDate: Wed Jul 17 13:14:27 2019 +0800

Fix build failure
---
 python/mxnet/gluon/loss.py  |  2 -
 python/mxnet/gluon/nn/basic_layers.py   |  4 +-
 python/mxnet/gluon/utils.py | 52 +---
 python/mxnet/numpy_extension/__init__.py|  5 +-
 python/mxnet/test_utils.py  |  1 +
 src/operator/numpy/np_init_op.cc|  2 +-
 src/operator/numpy/np_init_op.cu|  2 +-
 tests/python/unittest/test_contrib_amp.py   | 86 --
 tests/python/unittest/test_numpy_gluon.py   |  7 ++-
 tests/python/unittest/test_numpy_ndarray.py | 24 
 tests/python/unittest/test_numpy_op.py  | 94 ++---
 11 files changed, 66 insertions(+), 213 deletions(-)

diff --git a/python/mxnet/gluon/loss.py b/python/mxnet/gluon/loss.py
index d634e79..d2e2344 100644
--- a/python/mxnet/gluon/loss.py
+++ b/python/mxnet/gluon/loss.py
@@ -29,7 +29,6 @@ import numpy as np
 from .. import ndarray
 from ..base import numeric_types
 from .block import HybridBlock
-from .utils import _adapt_np_array
 from ..util import is_np_array
 
 
@@ -188,7 +187,6 @@ class L1Loss(Loss):
 def __init__(self, weight=None, batch_axis=0, **kwargs):
 super(L1Loss, self).__init__(weight, batch_axis, **kwargs)
 
-@_adapt_np_array
 def hybrid_forward(self, F, pred, label, sample_weight=None):
 label = _reshape_like(F, label, pred)
 loss = F.abs(label - pred)
diff --git a/python/mxnet/gluon/nn/basic_layers.py b/python/mxnet/gluon/nn/basic_layers.py
index 87d6e89..8596742 100644
--- a/python/mxnet/gluon/nn/basic_layers.py
+++ b/python/mxnet/gluon/nn/basic_layers.py
@@ -25,7 +25,7 @@ import numpy as np
 
 from .activations import Activation
 from ..block import Block, HybridBlock
-from ..utils import _indent, _adapt_np_array
+from ..utils import _indent
 from ... import nd, sym
 from ...util import is_np_array
 
@@ -521,7 +521,6 @@ class InstanceNorm(HybridBlock):
 shape=(in_channels,), 
init=beta_initializer,
 allow_deferred_init=True)
 
-@_adapt_np_array
 def hybrid_forward(self, F, x, gamma, beta):
 if self._axis == 1:
 return F.InstanceNorm(x, gamma, beta,
@@ -706,7 +705,6 @@ class HybridLambda(HybridBlock):
 "Unrecognized function in lambda: {} of type {}"
 .format(function, type(function)))
 
-@_adapt_np_array
 def hybrid_forward(self, F, x, *args):
 return self._func(F, x, *args)
 
diff --git a/python/mxnet/gluon/utils.py b/python/mxnet/gluon/utils.py
index 2822c70..b8e5b26 100644
--- a/python/mxnet/gluon/utils.py
+++ b/python/mxnet/gluon/utils.py
@@ -40,7 +40,7 @@ except ImportError:
 import numpy as np
 
 from .. import ndarray
-from ..util import is_np_shape, is_np_array, wraps_safely
+from ..util import is_np_shape, is_np_array
 from .. import numpy as _mx_np  # pylint: disable=reimported
 
 
@@ -484,53 +484,3 @@ def _check_all_np_ndarrays(out):
 for i in out:
 _check_all_np_ndarrays(i)
 # pylint: enable=no-else-raise
-
-
-def _to_classic_arrays(*args, **kwargs):
-"""Convert arrays to classic arrays. This is used in a Gluon layer for converting
-inputs of np arrays to classic arrays so that the layer built with legacy ops can still
-be used in np_array semantics."""
-from ..numpy import ndarray as np_ndarray
-from ..symbol.numpy import _Symbol as np_symbol
-num_inputs = len(args)
-assert num_inputs != 0
-if not is_np_array():
-return args, kwargs
-in_arrs = [arr if arr is None else arr.as_nd_ndarray() for arr in args]
-new_kwargs = {}
-for k, v in kwargs.items():
-if isinstance(v, (np_ndarray, np_symbol)):
-new_kwargs[k] = v.as_nd_ndarray()
-else:
-new_kwargs[k] = v
-return in_arrs, new_kwargs
-
-
-def _to_np_arrays(*args):
-"""Convert arrays to np arrays. This is used in a Gluon layer for converting
-outputs of classic arrays to np arrays so that the layer built with legacy ops can still
-be used in np_array semantics."""
-num_outputs = len(args)
-assert num_outputs != 0
-if not is_np_array():
-return args[0] if num_outputs == 1 else args
-out = [arr.as_np_ndarray() for arr in args]
-return out[0] if num_outputs == 1 else out
-
-
-# TODO(junwu): This is a temp solution for allowing basic layers
-# implemented using legacy ops to accept np.ndarrays as inputs and return
-# np.ndarrays as outputs. We should remove it after changing all the layers
-# to use np ops in np_array semantics in the future.
-def _adapt_np_array(func):
-@wraps_safely(func)

[incubator-mxnet] 42/42: Fix build failure

2019-07-17 Thread haoj
This is an automated email from the ASF dual-hosted git repository.

haoj pushed a commit to branch numpy
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git

commit 76afc2d9f03d5a2fc5bab7d20f49883973e00367
Author: reminisce 
AuthorDate: Wed Jul 17 13:14:27 2019 +0800

Fix build failure
---
 python/mxnet/gluon/loss.py  |  2 -
 python/mxnet/gluon/nn/basic_layers.py   |  4 +-
 python/mxnet/gluon/utils.py | 52 +---
 python/mxnet/numpy_extension/__init__.py|  5 +-
 python/mxnet/test_utils.py  |  1 +
 src/operator/numpy/np_init_op.cc|  2 +-
 src/operator/numpy/np_init_op.cu|  2 +-
 tests/python/unittest/test_contrib_amp.py   | 86 --
 tests/python/unittest/test_numpy_gluon.py   |  7 ++-
 tests/python/unittest/test_numpy_ndarray.py | 24 
 tests/python/unittest/test_numpy_op.py  | 94 ++---
 11 files changed, 66 insertions(+), 213 deletions(-)

diff --git a/python/mxnet/gluon/loss.py b/python/mxnet/gluon/loss.py
index d634e79..d2e2344 100644
--- a/python/mxnet/gluon/loss.py
+++ b/python/mxnet/gluon/loss.py
@@ -29,7 +29,6 @@ import numpy as np
 from .. import ndarray
 from ..base import numeric_types
 from .block import HybridBlock
-from .utils import _adapt_np_array
 from ..util import is_np_array
 
 
@@ -188,7 +187,6 @@ class L1Loss(Loss):
 def __init__(self, weight=None, batch_axis=0, **kwargs):
 super(L1Loss, self).__init__(weight, batch_axis, **kwargs)
 
-@_adapt_np_array
 def hybrid_forward(self, F, pred, label, sample_weight=None):
 label = _reshape_like(F, label, pred)
 loss = F.abs(label - pred)
diff --git a/python/mxnet/gluon/nn/basic_layers.py b/python/mxnet/gluon/nn/basic_layers.py
index 87d6e89..8596742 100644
--- a/python/mxnet/gluon/nn/basic_layers.py
+++ b/python/mxnet/gluon/nn/basic_layers.py
@@ -25,7 +25,7 @@ import numpy as np
 
 from .activations import Activation
 from ..block import Block, HybridBlock
-from ..utils import _indent, _adapt_np_array
+from ..utils import _indent
 from ... import nd, sym
 from ...util import is_np_array
 
@@ -521,7 +521,6 @@ class InstanceNorm(HybridBlock):
 shape=(in_channels,), 
init=beta_initializer,
 allow_deferred_init=True)
 
-@_adapt_np_array
 def hybrid_forward(self, F, x, gamma, beta):
 if self._axis == 1:
 return F.InstanceNorm(x, gamma, beta,
@@ -706,7 +705,6 @@ class HybridLambda(HybridBlock):
 "Unrecognized function in lambda: {} of type {}"
 .format(function, type(function)))
 
-@_adapt_np_array
 def hybrid_forward(self, F, x, *args):
 return self._func(F, x, *args)
 
diff --git a/python/mxnet/gluon/utils.py b/python/mxnet/gluon/utils.py
index 2822c70..b8e5b26 100644
--- a/python/mxnet/gluon/utils.py
+++ b/python/mxnet/gluon/utils.py
@@ -40,7 +40,7 @@ except ImportError:
 import numpy as np
 
 from .. import ndarray
-from ..util import is_np_shape, is_np_array, wraps_safely
+from ..util import is_np_shape, is_np_array
 from .. import numpy as _mx_np  # pylint: disable=reimported
 
 
@@ -484,53 +484,3 @@ def _check_all_np_ndarrays(out):
 for i in out:
 _check_all_np_ndarrays(i)
 # pylint: enable=no-else-raise
-
-
-def _to_classic_arrays(*args, **kwargs):
-"""Convert arrays to classic arrays. This is used in a Gluon layer for converting
-inputs of np arrays to classic arrays so that the layer built with legacy ops can still
-be used in np_array semantics."""
-from ..numpy import ndarray as np_ndarray
-from ..symbol.numpy import _Symbol as np_symbol
-num_inputs = len(args)
-assert num_inputs != 0
-if not is_np_array():
-return args, kwargs
-in_arrs = [arr if arr is None else arr.as_nd_ndarray() for arr in args]
-new_kwargs = {}
-for k, v in kwargs.items():
-if isinstance(v, (np_ndarray, np_symbol)):
-new_kwargs[k] = v.as_nd_ndarray()
-else:
-new_kwargs[k] = v
-return in_arrs, new_kwargs
-
-
-def _to_np_arrays(*args):
-"""Convert arrays to np arrays. This is used in a Gluon layer for converting
-outputs of classic arrays to np arrays so that the layer built with legacy ops can still
-be used in np_array semantics."""
-num_outputs = len(args)
-assert num_outputs != 0
-if not is_np_array():
-return args[0] if num_outputs == 1 else args
-out = [arr.as_np_ndarray() for arr in args]
-return out[0] if num_outputs == 1 else out
-
-
-# TODO(junwu): This is a temp solution for allowing basic layers
-# implemented using legacy ops to accept np.ndarrays as inputs and return
-# np.ndarrays as outputs. We should remove it after changing all the layers
-# to use np ops in np_array semantics in the future.
-def _adapt_np_array(func):
-@wraps_safely(func)