zheng-da commented on a change in pull request #9977: Cpu lstm inference
URL: https://github.com/apache/incubator-mxnet/pull/9977#discussion_r172948107
 
 

 ##########
 File path: src/operator/rnn-inl.h
 ##########
 @@ -144,15 +149,224 @@ class RNNOp : public Operator {
                         const std::vector<OpReqType> &req,
                         const std::vector<TBlob> &in_grad,
                         const std::vector<TBlob> &aux_args) {
-    using namespace mshadow;
-    using namespace mshadow::expr;
     // TODO(sbodenstein): add MShadow implementation
   }
 
  private:
   RNNParam param_;
 };  // class RNNOp
 
+template<typename DType>
+class RNNOp<cpu, DType> : public Operator {
+ public:
+  explicit RNNOp(RNNParam param) {
+    this->param_ = param;
+    // RNN Mode
+    switch (param_.mode) {
+      case rnn_enum::kLstm:
+        break;
+      default:
+        LOG(FATAL) << "only LSTM is implmented on CPU";
+    }
+    if (param_.mode == rnn_enum::kLstm)
+      param_.lstm_q_ = true;
+    else
+      param_.lstm_q_ = false;
 
Review comment:
   It seems this check can be merged into the `switch` statement above: set `param_.lstm_q_ = true` in the `case rnn_enum::kLstm:` branch and `false` in `default:`, instead of re-testing `param_.mode` afterward.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

Reply via email to