pitrou commented on a change in pull request #6302:
URL: https://github.com/apache/arrow/pull/6302#discussion_r557480517



##########
File path: cpp/src/arrow/ipc/test_common.cc
##########
@@ -1000,6 +1003,102 @@ Status MakeDictExtension(std::shared_ptr<RecordBatch>* out) {
   return Status::OK();
 }
 
+namespace {
+
+template <typename CValueType, typename SeedType, typename DistributionType>
+void FillRandomData(CValueType* data, size_t n, CValueType min, CValueType max,
+                    SeedType seed) {
+  std::default_random_engine rng(seed);
+  DistributionType dist(min, max);
+  std::generate(data, data + n,
+                [&dist, &rng] { return static_cast<CValueType>(dist(rng)); });
+}
+
+template <typename CValueType, typename SeedType>
+enable_if_t<std::is_integral<CValueType>::value && std::is_signed<CValueType>::value,
+            void>
+FillRandomData(CValueType* data, size_t n, SeedType seed) {
+  FillRandomData<CValueType, SeedType, std::uniform_int_distribution<CValueType>>(
+      data, n, 0, 1000, seed);
+}
+
+template <typename CValueType, typename SeedType>
+enable_if_t<std::is_integral<CValueType>::value && std::is_unsigned<CValueType>::value,
+            void>
+FillRandomData(CValueType* data, size_t n, SeedType seed) {
+  FillRandomData<CValueType, SeedType, std::uniform_int_distribution<CValueType>>(
+      data, n, -1000, 1000, seed);

Review comment:
       Looks like you're using -1000 as min for an unsigned type?
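
       One possible way to address this, sketched below, assuming the intent is to mirror the signed overload with a non-negative lower bound (the 0..1000 range is a placeholder, not a confirmed choice):

```cpp
// Sketch only: for unsigned value types, keep the lower bound at 0 so the
// uniform_int_distribution is not constructed with a negative minimum.
template <typename CValueType, typename SeedType>
enable_if_t<std::is_integral<CValueType>::value && std::is_unsigned<CValueType>::value,
            void>
FillRandomData(CValueType* data, size_t n, SeedType seed) {
  FillRandomData<CValueType, SeedType, std::uniform_int_distribution<CValueType>>(
      data, n, 0, 1000, seed);
}
```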

##########
File path: cpp/src/arrow/ipc/metadata_internal.cc
##########
@@ -1349,17 +1349,22 @@ Status GetTensorMetadata(const Buffer& metadata, std::shared_ptr<DataType>* type
    return Status::IOError("Header-type of flatbuffer-encoded Message is not Tensor.");
   }
 
-  int ndim = static_cast<int>(tensor->shape()->size());
+  flatbuffers::uoffset_t ndim = tensor->shape()->size();

Review comment:
       You can use `const auto`.
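
       For example, the assignment could simply read:

```cpp
// Type deduced as flatbuffers::uoffset_t from the flatbuffers accessor.
const auto ndim = tensor->shape()->size();
```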

##########
File path: cpp/src/arrow/ipc/metadata_internal.cc
##########
@@ -1349,17 +1349,22 @@ Status GetTensorMetadata(const Buffer& metadata, std::shared_ptr<DataType>* type
    return Status::IOError("Header-type of flatbuffer-encoded Message is not Tensor.");
   }
 
-  int ndim = static_cast<int>(tensor->shape()->size());
+  flatbuffers::uoffset_t ndim = tensor->shape()->size();
 
-  for (int i = 0; i < ndim; ++i) {
+  for (flatbuffers::uoffset_t i = 0; i < ndim; ++i) {

Review comment:
       Use `decltype(ndim)` like below?
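
       That would make the loop read something like the following, assuming `ndim` stays a non-const local as in the diff above:

```cpp
// Reuse ndim's deduced type for the loop index instead of spelling it out.
for (decltype(ndim) i = 0; i < ndim; ++i) {
  // ... per-dimension handling ...
}
```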



