jiashu-zhu opened a new issue #16365: Exception: unknown storage type: -1
URL: https://github.com/apache/incubator-mxnet/issues/16365
 
 
   ## Description
   I encounter the exception "Exception: unknown storage type: -1" when I use 
my focal loss
   
   ## my focal loss
   the shape of out_data[0] is (batch_size, 2, anchor_num)
   the shape of in_data[1] is (batch_size, anchor_num)
   ```
   class FocalLossOperator(mx.operator.CustomOp):
       def __init__(self, gamma, alpha):
           super(FocalLossOperator, self).__init__()
           self.gamma = gamma
           self.alpha = alpha
   
       def forward(self, is_train, req, in_data, out_data, aux):
           #print('forward')
           #print(in_data[0].shape)
           y = mx.nd.exp(in_data[0] - mx.nd.max_axis(in_data[0], 
axis=1).reshape((in_data[0].shape[0], 1, -1)))
           y /= mx.nd.sum(y, axis=1).reshape((in_data[0].shape[0],1, -1))
   
           self.assign(out_data[0], req[0], y)
   
       def backward(self, req, out_grad, in_data, out_data, in_grad, aux):
           y_numpy = out_data[0].asnumpy().transpose((0,2,1))
           label_numpy = in_data[1].asnumpy()
           y_numpy = y_numpy.reshape((-1,2))
           label_numpy = label_numpy.reshape((-1))
           #print(len(np.where(label_numpy == -1)[0]))
           indices = np.where(label_numpy == -1)[0]
           label_numpy[indices] = 0
           self.pro_truth = mx.nd.array(y_numpy[np.arange(y_numpy.shape[0]), 
label_numpy.astype(np.int)])
   
           print(len(indices))
           # i!=j
           pro_truth = (self.pro_truth + 
1e-14).reshape((self.pro_truth.shape[0], 1))
           grad = self.alpha * mx.nd.power(1 - pro_truth, self.gamma - 1) * \
                  (self.gamma * (-1 * pro_truth * mx.nd.array(y_numpy)) * 
mx.nd.log(pro_truth) + mx.nd.array(y_numpy) * (1 - pro_truth))
   
           # i==j
           pro_truth = self.pro_truth + 1e-14
   
           grad_numpy = grad.asnumpy()
           grad_numpy[np.arange(y_numpy.shape[0]), label_numpy.astype(np.int)] 
= (
                       self.alpha * mx.nd.power(1 - pro_truth, self.gamma) * (
                       self.gamma * pro_truth * mx.nd.log(pro_truth) + 
pro_truth - 1)).asnumpy()
           grad_numpy /= label_numpy.shape[0]
           grad_numpy[indices,:] = 0
           #grad_numpy = 
grad_numpy.reshape((out_data[0].shape[0],-1,out_data[0].shape[1])).transpose((0,2,1))
           grad = mx.nd.array(grad_numpy)
           grad = 
grad.reshape(out_data[0].shape[0],-1,out_data[0].shape[1]).transpose((0,2,1))
   
           self.assign(in_grad[0], req[0], grad)
   
   @mx.operator.register('FocalLoss')
   class FocalLossProp(mx.operator.CustomOpProp):
       def __init__(self, gamma, alpha):
           super(FocalLossProp, self).__init__(need_top_grad=False)
   
           self.gamma = float(gamma)
           self.alpha = float(alpha)
   
       def list_arguments(self):
           return ['data', 'labels']
   
       def list_outputs(self):
           return ['output']
   
       def infer_shape(self, in_shape):
           data_shape = in_shape[0]
           labels_shape = in_shape[1]
           out_shape = data_shape
           return [data_shape, labels_shape], [out_shape], []
   
       def create_operator(self, ctx, shapes, dtypes):
           return FocalLossOperator(self.gamma, self.alpha)
   ```
   
   ## Error Message:
   Error in CustomOp.backward: Traceback (most recent call last):
     File "/home/anaconda2/lib/python2.7/site-packages/mxnet/operator.py", line 
1020, in backward_entry
       stype=stype))
     File 
"/home/anaconda2/lib/python2.7/site-packages/mxnet/ndarray/sparse.py", line 
1187, in _ndarray_cls
       raise Exception("unknown storage type: %s"%stype)
   Exception: unknown storage type: -1
   
   terminate called after throwing an instance of 'dmlc::Error'
     what():  [12:17:03] src/operator/custom/custom.cc:418: Check failed: 
reinterpret_cast<CustomOpFBFunc>(params.info->callbacks[kCustomOpBackward])( 
ptrs.size(), const_cast<void**>(ptrs.data()), const_cast<int*>(tags.data()), 
reinterpret_cast<const int*>(req.data()), static_cast<int>(ctx.is_train), 
params.info->contexts[kCustomOpBackward]) 
   
   Stack trace returned 8 entries:
   [bt] (0) 
/home/anaconda2/lib/python2.7/site-packages/mxnet/libmxnet.so(+0x40b29a) 
[0x7feccd0c829a]
   [bt] (1) 
/home/anaconda2/lib/python2.7/site-packages/mxnet/libmxnet.so(+0x40b8b1) 
[0x7feccd0c88b1]
   [bt] (2) 
/home/anaconda2/lib/python2.7/site-packages/mxnet/libmxnet.so(+0x6c6239) 
[0x7feccd383239]
   [bt] (3) 
/home/anaconda2/lib/python2.7/site-packages/mxnet/libmxnet.so(+0x6e1020) 
[0x7feccd39e020]
   [bt] (4) 
/home/anaconda2/lib/python2.7/site-packages/mxnet/libmxnet.so(+0x6c7078) 
[0x7feccd384078]
   [bt] (5) /home/anaconda2/bin/../lib/libstdc++.so.6(+0xafc5c) [0x7fed70a50c5c]
   [bt] (6) /lib/x86_64-linux-gnu/libpthread.so.0(+0x76ba) [0x7fed78a076ba]
   [bt] (7) /lib/x86_64-linux-gnu/libc.so.6(clone+0x6d) [0x7fed7802d41d]
   
   

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

Reply via email to