piiswrong closed pull request #8551: * [cpp-package] fix cpp issue
URL: https://github.com/apache/incubator-mxnet/pull/8551
 
 
   

This is a PR merged from a forked repository.
As GitHub hides the original diff on merge, it is displayed below for
the sake of provenance:


diff --git a/cpp-package/example/alexnet.cpp b/cpp-package/example/alexnet.cpp
index 4194b5bae9..dd5d2b4b06 100644
--- a/cpp-package/example/alexnet.cpp
+++ b/cpp-package/example/alexnet.cpp
@@ -23,8 +23,7 @@
 #include <map>
 #include <string>
 #include "mxnet-cpp/MxNetCpp.h"
-// Allow IDE to parse the types
-#include "../include/mxnet-cpp/op.h"
+
 
 using namespace std;
 using namespace mxnet::cpp;
diff --git a/cpp-package/example/charRNN.cpp b/cpp-package/example/charRNN.cpp
index f5fff853cb..218d11efc9 100644
--- a/cpp-package/example/charRNN.cpp
+++ b/cpp-package/example/charRNN.cpp
@@ -43,8 +43,6 @@
 #include <chrono>
 #include "mxnet-cpp/MxNetCpp.h"
 
-// Allow IDE to parse the types
-#include "../include/mxnet-cpp/op.h"
 
 using namespace std;
 using namespace mxnet::cpp;
diff --git a/cpp-package/example/googlenet.cpp b/cpp-package/example/googlenet.cpp
index ac0585e81a..fe5dea6a1f 100644
--- a/cpp-package/example/googlenet.cpp
+++ b/cpp-package/example/googlenet.cpp
@@ -22,10 +22,8 @@
 #include <string>
 #include <vector>
 #include <map>
-
 #include "mxnet-cpp/MxNetCpp.h"
-// Allow IDE to parse the types
-#include "../include/mxnet-cpp/op.h"
+
 
 using namespace mxnet::cpp;
 
@@ -159,8 +157,8 @@ int main(int argc, char const *argv[]) {
     train_iter.Reset();
     while (train_iter.Next()) {
       auto data_batch = train_iter.GetDataBatch();
-      args_map["data"] = data_batch.data.Copy(Context::gpu());
-      args_map["data_label"] = data_batch.label.Copy(Context::gpu());
+      data_batch.data.CopyTo(&args_map["data"]);
+      data_batch.label.CopyTo(&args_map["data_label"]);
       NDArray::WaitAll();
       exec->Forward(true);
       exec->Backward();
@@ -174,8 +172,8 @@ int main(int argc, char const *argv[]) {
     val_iter.Reset();
     while (val_iter.Next()) {
       auto data_batch = val_iter.GetDataBatch();
-      args_map["data"] = data_batch.data.Copy(Context::gpu());
-      args_map["data_label"] = data_batch.label.Copy(Context::gpu());
+      data_batch.data.CopyTo(&args_map["data"]);
+      data_batch.label.CopyTo(&args_map["data_label"]);
       NDArray::WaitAll();
       exec->Forward(false);
       NDArray::WaitAll();
diff --git a/cpp-package/example/inception_bn.cpp b/cpp-package/example/inception_bn.cpp
index de21aadea9..e6f47904e0 100644
--- a/cpp-package/example/inception_bn.cpp
+++ b/cpp-package/example/inception_bn.cpp
@@ -19,13 +19,11 @@
 
 /*!
  */
-#include <iostream>
 #include <map>
 #include <string>
 #include <vector>
 #include "mxnet-cpp/MxNetCpp.h"
-// Allow IDE to parse the types
-#include "../include/mxnet-cpp/op.h"
+
 
 using namespace mxnet::cpp;
 
diff --git a/cpp-package/example/lenet.cpp b/cpp-package/example/lenet.cpp
index 05cc4517fe..4c5a1f1165 100644
--- a/cpp-package/example/lenet.cpp
+++ b/cpp-package/example/lenet.cpp
@@ -19,14 +19,12 @@
 
 /*!
  */
-#include <iostream>
 #include <fstream>
 #include <map>
 #include <string>
 #include <vector>
 #include "mxnet-cpp/MxNetCpp.h"
-// Allow IDE to parse the types
-#include "../include/mxnet-cpp/op.h"
+
 
 using namespace std;
 using namespace mxnet::cpp;
diff --git a/cpp-package/example/lenet_with_mxdataiter.cpp b/cpp-package/example/lenet_with_mxdataiter.cpp
index 077f556225..04f5cbca3a 100644
--- a/cpp-package/example/lenet_with_mxdataiter.cpp
+++ b/cpp-package/example/lenet_with_mxdataiter.cpp
@@ -19,14 +19,12 @@
 
 /*!
  */
-#include <iostream>
-#include <fstream>
 #include <map>
 #include <string>
 #include <vector>
+#include <chrono>
 #include "mxnet-cpp/MxNetCpp.h"
-// Allow IDE to parse the types
-#include "../include/mxnet-cpp/op.h"
+
 
 using namespace std;
 using namespace mxnet::cpp;
@@ -89,15 +87,15 @@ int main(int argc, char const *argv[]) {
   args_map["fc2_b"] = 0;
 
   auto train_iter = MXDataIter("MNISTIter")
-      .SetParam("image", "./train-images-idx3-ubyte")
-      .SetParam("label", "./train-labels-idx1-ubyte")
+      .SetParam("image", "./mnist_data/train-images-idx3-ubyte")
+      .SetParam("label", "./mnist_data/train-labels-idx1-ubyte")
       .SetParam("batch_size", batch_size)
       .SetParam("shuffle", 1)
       .SetParam("flat", 0)
       .CreateDataIter();
   auto val_iter = MXDataIter("MNISTIter")
-      .SetParam("image", "./t10k-images-idx3-ubyte")
-      .SetParam("label", "./t10k-labels-idx1-ubyte")
+      .SetParam("image", "./mnist_data/t10k-images-idx3-ubyte")
+      .SetParam("label", "./mnist_data/t10k-labels-idx1-ubyte")
       .CreateDataIter();
 
   Optimizer* opt = OptimizerRegistry::Find("ccsgd");
@@ -111,35 +109,62 @@ int main(int argc, char const *argv[]) {
   auto *exec = lenet.SimpleBind(Context::gpu(), args_map);
   auto arg_names = lenet.ListArguments();
 
+  // Create metrics
+  Accuracy train_acc, val_acc;
+
   for (int iter = 0; iter < max_epoch; ++iter) {
-    LG << "Epoch: " << iter;
-    train_iter.Reset();
-    while (train_iter.Next()) {
+      int samples = 0;
+      train_iter.Reset();
+      train_acc.Reset();
+
+      auto tic = chrono::system_clock::now();
+
+     while (train_iter.Next()) {
+      samples += batch_size;
       auto data_batch = train_iter.GetDataBatch();
-      args_map["data"] = data_batch.data.Copy(Context::gpu());
-      args_map["data_label"] = data_batch.label.Copy(Context::gpu());
+
+      data_batch.data.CopyTo(&args_map["data"]);
+      data_batch.label.CopyTo(&args_map["data_label"]);
       NDArray::WaitAll();
+
+      // Compute gradients
       exec->Forward(true);
       exec->Backward();
+
       // Update parameters
       for (size_t i = 0; i < arg_names.size(); ++i) {
         if (arg_names[i] == "data" || arg_names[i] == "data_label") continue;
         opt->Update(i, exec->arg_arrays[i], exec->grad_arrays[i]);
       }
+
+      // Update metric
+      train_acc.Update(data_batch.label, exec->outputs[0]);
     }
 
+     // one epoch of training is finished
+     auto toc = chrono::system_clock::now();
+     float duration = chrono::duration_cast<chrono::milliseconds>(toc - tic).count() / 1000.0;
+     LG << "Epoch[" << iter << "] " << samples / duration \
+         << " samples/sec " << "Train-Accuracy=" << train_acc.Get();;
+
+      val_iter.Reset();
+      val_acc.Reset();
+
     Accuracy acu;
     val_iter.Reset();
     while (val_iter.Next()) {
       auto data_batch = val_iter.GetDataBatch();
-      args_map["data"] = data_batch.data.Copy(Context::gpu());
-      args_map["data_label"] = data_batch.label.Copy(Context::gpu());
+      data_batch.data.CopyTo(&args_map["data"]);
+      data_batch.label.CopyTo(&args_map["data_label"]);
       NDArray::WaitAll();
+
+      // Only forward pass is enough as no gradient is needed when evaluating
       exec->Forward(false);
       NDArray::WaitAll();
       acu.Update(data_batch.label, exec->outputs[0]);
+      val_acc.Update(data_batch.label, exec->outputs[0]);
     }
-    LG << "Accuracy: " << acu.Get();
+    LG << "Epoch[" << iter << "] Val-Accuracy=" << val_acc.Get();
   }
 
   delete exec;
diff --git a/cpp-package/example/mlp.cpp b/cpp-package/example/mlp.cpp
index c9c4ff2451..b40328da6e 100644
--- a/cpp-package/example/mlp.cpp
+++ b/cpp-package/example/mlp.cpp
@@ -24,8 +24,7 @@
 #include <vector>
 #include <string>
 #include "mxnet-cpp/MxNetCpp.h"
-// Allow IDE to parse the types
-#include "../include/mxnet-cpp/op.h"
+
 
 using namespace std;
 using namespace mxnet::cpp;
diff --git a/cpp-package/example/mlp_cpu.cpp b/cpp-package/example/mlp_cpu.cpp
index 748c32e8c2..051bad1bd2 100644
--- a/cpp-package/example/mlp_cpu.cpp
+++ b/cpp-package/example/mlp_cpu.cpp
@@ -106,8 +106,8 @@ int main(int argc, char** argv) {
       samples += batch_size;
       auto data_batch = train_iter.GetDataBatch();
       // Set data and label
-      args["X"] = data_batch.data;
-      args["label"] = data_batch.label;
+      data_batch.data.CopyTo(&args["X"]);
+      data_batch.label.CopyTo(&args["label"]);
 
       // Compute gradients
       exec->Forward(true);
@@ -124,8 +124,8 @@ int main(int argc, char** argv) {
     val_iter.Reset();
     while (val_iter.Next()) {
       auto data_batch = val_iter.GetDataBatch();
-      args["X"] = data_batch.data;
-      args["label"] = data_batch.label;
+      data_batch.data.CopyTo(&args["X"]);
+      data_batch.label.CopyTo(&args["label"]);
       // Forward pass is enough as no gradient is needed when evaluating
       exec->Forward(false);
       acc.Update(data_batch.label, exec->outputs[0]);
diff --git a/cpp-package/example/resnet.cpp b/cpp-package/example/resnet.cpp
index ca5643de9d..03b3d72176 100644
--- a/cpp-package/example/resnet.cpp
+++ b/cpp-package/example/resnet.cpp
@@ -19,13 +19,11 @@
 
 /*!
  */
-#include <iostream>
 #include <map>
 #include <string>
 #include <vector>
 #include "mxnet-cpp/MxNetCpp.h"
-// Allow IDE to parse the types
-#include "../include/mxnet-cpp/op.h"
+
 
 using namespace mxnet::cpp;
 

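For reference, the training-loop pattern these examples converge on — copying each batch into the executor's pre-bound "data"/"data_label" arrays with CopyTo, updating an Accuracy metric, and timing the epoch with <chrono> — is sketched below. This is a minimal sketch, not code from the PR: the helper name TrainOneEpoch and its parameter list are assumptions, and it presumes the executor, args_map, optimizer, argument names and data iterator have already been set up as in the examples above.

    #include <chrono>
    #include <map>
    #include <string>
    #include <vector>
    #include "mxnet-cpp/MxNetCpp.h"

    using namespace mxnet::cpp;

    // Hypothetical helper: one training epoch over a pre-built data iterator.
    void TrainOneEpoch(MXDataIter &train_iter, Executor *exec,
                       std::map<std::string, NDArray> &args_map,
                       const std::vector<std::string> &arg_names,
                       Optimizer *opt, int batch_size, int epoch) {
      Accuracy train_acc;
      int samples = 0;
      auto tic = std::chrono::system_clock::now();

      train_iter.Reset();
      while (train_iter.Next()) {
        samples += batch_size;
        auto data_batch = train_iter.GetDataBatch();

        // Write the batch into the arrays the executor was bound with,
        // instead of replacing the map entries with fresh GPU copies.
        data_batch.data.CopyTo(&args_map["data"]);
        data_batch.label.CopyTo(&args_map["data_label"]);
        NDArray::WaitAll();

        exec->Forward(true);
        exec->Backward();

        // Update all parameters except the data and label arrays.
        for (size_t i = 0; i < arg_names.size(); ++i) {
          if (arg_names[i] == "data" || arg_names[i] == "data_label") continue;
          opt->Update(i, exec->arg_arrays[i], exec->grad_arrays[i]);
        }

        train_acc.Update(data_batch.label, exec->outputs[0]);
      }

      auto toc = std::chrono::system_clock::now();
      float seconds =
          std::chrono::duration_cast<std::chrono::milliseconds>(toc - tic).count() / 1000.0f;
      LG << "Epoch[" << epoch << "] " << samples / seconds
         << " samples/sec Train-Accuracy=" << train_acc.Get();
    }

The apparent rationale for the CopyTo change: SimpleBind has already allocated and bound the executor's argument arrays, so copying into those arrays feeds the new batch to the executor, whereas assigning a fresh Copy(Context::gpu()) only replaces the map entry after binding.
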
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services
