[incubator-mxnet] 09/42: Change np_compat to np_shape

2019-07-26 Thread haoj
This is an automated email from the ASF dual-hosted git repository.

haoj pushed a commit to branch numpy
in repository https://gitbox.apache.org/repos/asf/incubator-mxnet.git

commit fd0cb053e253cbf2d0cd498f79dec96ba30fe155
Author: reminisce 
AuthorDate: Sun May 26 22:41:28 2019 -0700

Change np_compat to np_shape
---
 python/mxnet/gluon/block.py |  2 +-
 python/mxnet/gluon/parameter.py | 10 +-
 python/mxnet/gluon/utils.py |  1 +
 python/mxnet/ndarray/numpy/_op.py   |  3 +--
 python/mxnet/ndarray/register.py|  4 ++--
 python/mxnet/numpy/__init__.py  |  2 +-
 python/mxnet/numpy/multiarray.py|  8 +++-
 tests/python/unittest/test_numpy_gluon.py   |  6 +++---
 tests/python/unittest/test_numpy_ndarray.py | 20 ++--
 tests/python/unittest/test_numpy_op.py  | 16 
 10 files changed, 35 insertions(+), 37 deletions(-)
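For orientation, the rename tracks MXNet's split of the old np_compat switch into finer-grained flags: is_np_shape() gates only the NumPy shape semantics (zero-size dimensions, () as a scalar shape, -1 for unknown dims). A minimal usage sketch, assuming the mxnet.util API of this era (is_np_shape/set_np_shape; that set_np_shape returns the previous mode is an assumption here):

    from mxnet.util import is_np_shape, set_np_shape

    print(is_np_shape())       # False by default: legacy mode, where 0 marks
                               # an unknown dimension
    prev = set_np_shape(True)  # enable NumPy-compatible shape semantics;
                               # assumed to return the previous mode
    print(is_np_shape())       # True: -1 now marks an unknown dimension
    set_np_shape(prev)         # restore the prior mode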

diff --git a/python/mxnet/gluon/block.py b/python/mxnet/gluon/block.py
index 807f160..1362891 100644
--- a/python/mxnet/gluon/block.py
+++ b/python/mxnet/gluon/block.py
@@ -551,7 +551,7 @@ class Block(object):
 
 for hook in self._forward_hooks.values():
 hook(self, args, out)
-if _mx_np.is_np_compat():
+if _mx_np.is_np_shape():
 _check_all_np_ndarrays(_flatten(out, "output")[0])
 return out
 
diff --git a/python/mxnet/gluon/parameter.py b/python/mxnet/gluon/parameter.py
index 307fb15..2d3e8c0 100644
--- a/python/mxnet/gluon/parameter.py
+++ b/python/mxnet/gluon/parameter.py
@@ -31,7 +31,7 @@ from .. import symbol, ndarray, initializer, context
 from ..context import Context, cpu
 from .. import autograd
 from .utils import _indent, _brief_print_list, shape_is_known
-from .. import is_np_shape
+from ..util import is_np_shape
 
 # pylint: disable= invalid-name
 tensor_types = (symbol.Symbol, ndarray.NDArray)
@@ -188,7 +188,7 @@ class Parameter(object):
 if self._shape is None:
 self._shape = new_shape
 return
-unknown_dim_size = -1 if is_np_compat() else 0
+unknown_dim_size = -1 if is_np_shape() else 0
 assert len(self._shape) == len(new_shape) and \
 all(j in (unknown_dim_size, i) for i, j in zip(new_shape, self._shape)), \
 "Expected shape %s is incompatible with given shape %s."%(
@@ -330,7 +330,7 @@ class Parameter(object):
 initializer.create(default_init)(
 initializer.InitDesc(self.name, {'__init__': init}), data)
 # TODO(junwu): use np random operators when available
-if is_np_compat():
+if is_np_shape():
 data = data.as_np_ndarray()  # convert to np.ndarray
 
 self._init_impl(data, ctx)
@@ -357,7 +357,7 @@ class Parameter(object):
 self._grad = [ndarray.zeros(shape=i.shape, dtype=i.dtype, ctx=i.context,
                             stype=self._grad_stype) for i in self._data]
 # TODO(junwu): use np.zeros
-if is_np_compat():
+if is_np_shape():
 self._grad = [arr.as_np_ndarray() for arr in self._grad]
 
 autograd.mark_variables(self._check_and_get(self._data, list),
@@ -606,7 +606,7 @@ class Parameter(object):
 self._var = symbol.var(self.name, shape=self.shape, dtype=self.dtype,
lr_mult=self.lr_mult, wd_mult=self.wd_mult,
init=self.init, stype=self._stype)
-if is_np_compat():
+if is_np_shape():
 self._var = self._var.as_np_ndarray()
 return self._var
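Every parameter.py hunk above hinges on the unknown-dimension sentinel whose gate the rename changes: 0 in legacy mode, -1 once np_shape is active. A self-contained sketch of the compatibility check in the first hunk (the standalone function and its name are illustrative, not MXNet API):

    def shapes_compatible(expected, given, np_shape_active):
        """Mirror Parameter's check: each expected dim must equal the given
        dim or the 'unknown' sentinel (-1 under np_shape, else 0)."""
        unknown = -1 if np_shape_active else 0
        return (len(expected) == len(given) and
                all(e in (unknown, g) for g, e in zip(given, expected)))

    assert shapes_compatible((-1, 128), (32, 128), np_shape_active=True)
    # Under np_shape, 0 is a real (empty) dimension, not a wildcard:
    assert not shapes_compatible((0, 128), (32, 128), np_shape_active=True)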
 
diff --git a/python/mxnet/gluon/utils.py b/python/mxnet/gluon/utils.py
index acfcce2..b21e06d 100644
--- a/python/mxnet/gluon/utils.py
+++ b/python/mxnet/gluon/utils.py
@@ -40,6 +40,7 @@ import numpy as np
 from .. import ndarray
 from ..util import is_np_shape
 
+
 def split_data(data, num_slice, batch_axis=0, even_split=True):
 """Splits an NDArray into `num_slice` slices along `batch_axis`.
 Usually used for data parallelism where each slices is sent
diff --git a/python/mxnet/ndarray/numpy/_op.py b/python/mxnet/ndarray/numpy/_op.py
index 725fba4..72b890d 100644
--- a/python/mxnet/ndarray/numpy/_op.py
+++ b/python/mxnet/ndarray/numpy/_op.py
@@ -20,7 +20,7 @@
 from __future__ import absolute_import
 import numpy as _np
 from ...base import numeric_types
-from ...util import _sanity_check_params, use_np_compat, set_module
+from ...util import _sanity_check_params, set_module
 from ...context import current_context
 from . import _internal as _npi
 
@@ -90,7 +90,6 @@ def ones(shape, dtype=None, **kwargs):
 
 
 #pylint: disable= too-many-arguments, no-member, protected-access
-@use_np_compat
 def _ufunc_helper(lhs, rhs, fn_array, fn_scalar, lfn_scalar, rfn_scalar=None, out=None):
 """ Helper function for element-wise operation.
 The function will 
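The _ufunc_helper body is cut off above; as a rough, standalone sketch of the dispatch such a helper performs (numbers.Number stands in for MXNet's numeric_types, and the commutative fallback to lfn_scalar is an assumption about the original):

    import numbers
    import operator

    def ufunc_helper(lhs, rhs, fn_array, fn_scalar, lfn_scalar, rfn_scalar=None):
        """Dispatch a binary element-wise op over (scalar | array) operands."""
        if isinstance(lhs, numbers.Number):
            if isinstance(rhs, numbers.Number):
                return fn_scalar(lhs, rhs)          # scalar op scalar
            fn = rfn_scalar or lfn_scalar           # scalar op array; reuse
            return fn(rhs, float(lhs))              # lfn_scalar if commutative
        if isinstance(rhs, numbers.Number):
            return lfn_scalar(lhs, float(rhs))      # array op scalar
        return fn_array(lhs, rhs)                   # array op array

    print(ufunc_helper(2, 3, operator.add, operator.add, operator.add))  # -> 5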
