matteosal opened a new issue #20149:
URL: https://github.com/apache/incubator-mxnet/issues/20149


   This program creates a simple operator and runs forward + backward with the C API, checking for failures:
   ```
   #include <cstdio>
   #include <cstdlib>
   #include <cstdint>
   #include <iostream>

   #include "mxnet/c_api.h"
   #include "nnvm/c_api.h"
   
   #define checkedMXCall(func, ...)                              \
     {                                                           \
       if (func(__VA_ARGS__) != 0) {                             \
         printf("MX call %s failed at line %d:\n%s",             \
               #func, __LINE__, MXGetLastError());               \
         exit(1);                                            \
       }                                                         \
     }
   
   int main() {
   
     /* Create symbol variables */
     SymbolHandle in_sym;
     checkedMXCall(MXSymbolCreateVariable, "in", &in_sym);
   
     /* Create symbol */
     OpHandle op;
     checkedMXCall(NNGetOpHandle, "sin", &op);
     SymbolHandle sym;
     checkedMXCall(MXSymbolCreateAtomicSymbol, op, 0, nullptr, nullptr, &sym);
     checkedMXCall(MXSymbolCompose, sym, "Sin", 1, nullptr, &in_sym);
   
     /* Create NDArray for argument */
     int dev_type = 1;  /* 1 = CPU */
     int dev_id = 0;
     mx_uint shape[2] = {2, 3};
     NDArrayHandle in_arr;
     checkedMXCall(MXNDArrayCreate, shape, 2, dev_type, dev_id, 0, 0, &in_arr);  /* delay_alloc = 0, dtype = 0 (float32) */
   
     /* Create NDArray for gradient and attach gradient */
     NDArrayHandle grad_arr;
     checkedMXCall(MXNDArrayCreate, shape, 2, dev_type, dev_id, 0, 0, &grad_arr);
     uint32_t grad_req[1] = {1};  /* 1 = kWriteTo */
     checkedMXCall(MXAutogradMarkVariables, 1, &in_arr, grad_req, &grad_arr);
   
     /* Create cached op */
     const char *cachedop_keys[1] = {"static_alloc"};
     const char *cachedop_vals[1] = {"true"};
     CachedOpHandle cached_op;
     checkedMXCall(MXCreateCachedOp, sym, 1, cachedop_keys, cachedop_vals, &cached_op, true);
   
     /* Set autograd to record */
     int dummy_prev;
     checkedMXCall(MXAutogradSetIsRecording, 1, &dummy_prev);
   
     /* Run cached op */
     int n_outs;
     NDArrayHandle *out_arr = nullptr;
     const int *dummy_stypes = nullptr;
     checkedMXCall(MXInvokeCachedOp, cached_op, 1, &in_arr, dev_type, dev_id, &n_outs, &out_arr, &dummy_stypes);
   
     /* Check that autograd is recording */
     bool res;
     checkedMXCall(MXAutogradIsRecording, &res);
     std::cout << "IsRecording: " << res << "\n";
   
     /* Create NDArray for outgrad and run backward */
     NDArrayHandle outgrad_arr;
     checkedMXCall(MXNDArrayCreate, shape, 2, dev_type, dev_id, 0, 0, &outgrad_arr);
     checkedMXCall(MXAutogradBackward, 1, out_arr, &outgrad_arr, true);
   
     return 0;
   }
   ```
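
   For reference, a typical build line for the repro above (the include and library paths are placeholders for the local MXNet source and build trees):
   ```
   g++ -std=c++11 repro.cpp \
     -I/path/to/mxnet/include \
     -I/path/to/mxnet/3rdparty/tvm/nnvm/include \
     -L/path/to/mxnet/build -lmxnet -o repro
   ./repro
   ```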
   The output is:
   ```
   [20:23:58] /home/matteo/Git/mxnet-build/Build/Linux-x86-64/MKL/mxnet/src/storage/storage.cc:199: Using Pooled (Naive) StorageManager for CPU
   IsRecording: 1
   MX call MXAutogradBackward failed at line 65:
   MXNetError: Check failed: !AGInfo::IsNone(*i): Cannot differentiate node because it is not in a computational graph. You need to set is_recording to true or use autograd.record() to save computational graphs for backward. If you want to differentiate the same graph twice, you need to pass retain_graph=True to backward.
   Stack trace:
     File "/home/matteo/Git/mxnet-build/Build/Linux-x86-64/MKL/mxnet/src/imperative/imperative.cc", line 402
   ```
   
   Why is `MXAutogradBackward` complaining about autograd recording if `MXAutogradIsRecording` gives `true`? Am I doing something wrong here?
   
   Thanks

