KellenSunderland commented on a change in pull request #13310: [MXNET-703] Update to TensorRT 5, ONNX IR 3. Fix inference bugs.
URL: https://github.com/apache/incubator-mxnet/pull/13310#discussion_r247690695
 
 

 ##########
 File path: src/operator/contrib/nnvm_to_onnx.cc
 ##########
 @@ -459,32 +458,40 @@ void ConvertPlaceholder(
 void ConvertConstant(
     GraphProto* const graph_proto, const std::string& node_name,
     std::unordered_map<std::string, NDArray>* const shared_buffer) {
-  NodeProto* const node_proto = graph_proto->add_node();
-  node_proto->set_name(node_name);
-  node_proto->add_output(node_name);
-  node_proto->set_op_type("Constant");
+  TensorProto* const initializer_proto = graph_proto->add_initializer();
+
+  // Create initializer for constants
+  initializer_proto->set_name(node_name);
+  // TODO(kellens): convert to fp16 if needed.
+  initializer_proto->set_data_type(TensorProto_DataType_FLOAT);
 
   const NDArray nd = shared_buffer->find(node_name)->second;
   const TBlob& blob = nd.data();
   const TShape shape = blob.shape_;
-  const int32_t size = shape.Size();
 
+  for (auto& dim : shape) {
+    initializer_proto->add_dims(static_cast<int64>(dim));
+  }
+
+  auto size = shape.Size();
+  // TODO(kellens): Note: a hard-coded float32 element size is assumed here.
  // Pass an array deleter so the buffer is released with delete[].
  std::shared_ptr<float> shared_data_ptr(new float[size], std::default_delete<float[]>());
   float* const data_ptr = shared_data_ptr.get();
   nd.SyncCopyToCPU(static_cast<void*>(data_ptr), size);
 
-  AttributeProto* const tensor_attr = node_proto->add_attribute();
-  tensor_attr->set_name("value");
-  tensor_attr->set_type(AttributeProto::TENSOR);
-
-  TensorProto* const tensor_proto = tensor_attr->mutable_t();
-  tensor_proto->set_data_type(TensorProto_DataType_FLOAT);
-  for (auto& dim : shape) {
-    tensor_proto->add_dims(static_cast<int64>(dim));
+  for (int blob_idx = 0; blob_idx < size; ++blob_idx) {
 
 Review comment:
   Fixed
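
For context on the change above: instead of emitting each weight as an ONNX
Constant node, the converter now registers it as a graph initializer, which is
the representation ONNX IR 3 (and TensorRT 5's parser) expects for weights.
A minimal sketch of that pattern against the ONNX protobuf API follows; the
helper name AddFloatInitializer and its flat, row-major "data" parameter are
illustrative assumptions, not code from this PR.

    #include <string>
    #include <vector>
    #include <onnx/onnx_pb.h>  // generated protobuf classes for onnx.proto

    // Hypothetical helper: register a float32 tensor as a graph initializer.
    void AddFloatInitializer(onnx::GraphProto* graph_proto,
                             const std::string& name,
                             const std::vector<int64_t>& dims,
                             const std::vector<float>& data) {
      onnx::TensorProto* init = graph_proto->add_initializer();
      init->set_name(name);  // ties the initializer to the matching graph input
      init->set_data_type(onnx::TensorProto_DataType_FLOAT);  // fp32 only, as in the diff
      for (int64_t d : dims) init->add_dims(d);
      for (float v : data) init->add_float_data(v);  // tensor contents, row-major
    }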
