mbrobbel commented on code in PR #7841:
URL: https://github.com/apache/arrow-rs/pull/7841#discussion_r2204625322


##########
parquet/tests/arrow_reader/statistics.rs:
##########
@@ -1944,11 +1948,77 @@ async fn test_float16() {
 }
 
 #[tokio::test]
-async fn test_decimal() {
-    // This creates a parquet file of 1 column "decimal_col" with decimal data 
type and precicion 9, scale 2
+async fn test_decimal32() {
+    // This creates a parquet file of 1 column "decimal32_col" with decimal 
data type and precicion 9, scale 2

Review Comment:
   ```suggestion
       // This creates a parquet file of 1 column "decimal32_col" with decimal 
data type and precision 9, scale 2
   ```



##########
parquet/tests/arrow_reader/statistics.rs:
##########
@@ -1944,11 +1948,77 @@ async fn test_float16() {
 }
 
 #[tokio::test]
-async fn test_decimal() {
-    // This creates a parquet file of 1 column "decimal_col" with decimal data 
type and precicion 9, scale 2
+async fn test_decimal32() {
+    // This creates a parquet file of 1 column "decimal32_col" with decimal 
data type and precicion 9, scale 2
     // file has 3 record batches, each has 5 rows. They will be saved into 3 
row groups
     let reader = TestReader {
-        scenario: Scenario::Decimal,
+        scenario: Scenario::Decimal32,
+        row_per_group: 5,
+    }
+    .build()
+    .await;
+
+    Test {
+        reader: &reader,
+        expected_min: Arc::new(
+            Decimal32Array::from(vec![100, -500, 2000])
+                .with_precision_and_scale(9, 2)
+                .unwrap(),
+        ),
+        expected_max: Arc::new(
+            Decimal32Array::from(vec![600, 600, 6000])
+                .with_precision_and_scale(9, 2)
+                .unwrap(),
+        ),
+        expected_null_counts: UInt64Array::from(vec![0, 0, 0]),
+        expected_row_counts: Some(UInt64Array::from(vec![5, 5, 5])),
+        // stats are exact
+        expected_max_value_exact: BooleanArray::from(vec![true, true, true]),
+        expected_min_value_exact: BooleanArray::from(vec![true, true, true]),
+        column_name: "decimal32_col",
+        check: Check::Both,
+    }
+    .run();
+}
+#[tokio::test]
+async fn test_decimal64() {
+    // This creates a parquet file of 1 column "decimal64_col" with decimal 
data type and precicion 9, scale 2

Review Comment:
   ```suggestion
       // This creates a parquet file of 1 column "decimal64_col" with decimal 
data type and precision 9, scale 2
   ```



##########
parquet/tests/arrow_reader/statistics.rs:
##########
@@ -1944,11 +1948,77 @@ async fn test_float16() {
 }
 
 #[tokio::test]
-async fn test_decimal() {
-    // This creates a parquet file of 1 column "decimal_col" with decimal data 
type and precicion 9, scale 2
+async fn test_decimal32() {
+    // This creates a parquet file of 1 column "decimal32_col" with decimal 
data type and precicion 9, scale 2
     // file has 3 record batches, each has 5 rows. They will be saved into 3 
row groups
     let reader = TestReader {
-        scenario: Scenario::Decimal,
+        scenario: Scenario::Decimal32,
+        row_per_group: 5,
+    }
+    .build()
+    .await;
+
+    Test {
+        reader: &reader,
+        expected_min: Arc::new(
+            Decimal32Array::from(vec![100, -500, 2000])
+                .with_precision_and_scale(9, 2)
+                .unwrap(),
+        ),
+        expected_max: Arc::new(
+            Decimal32Array::from(vec![600, 600, 6000])
+                .with_precision_and_scale(9, 2)
+                .unwrap(),
+        ),
+        expected_null_counts: UInt64Array::from(vec![0, 0, 0]),
+        expected_row_counts: Some(UInt64Array::from(vec![5, 5, 5])),
+        // stats are exact
+        expected_max_value_exact: BooleanArray::from(vec![true, true, true]),
+        expected_min_value_exact: BooleanArray::from(vec![true, true, true]),
+        column_name: "decimal32_col",
+        check: Check::Both,
+    }
+    .run();
+}
+#[tokio::test]
+async fn test_decimal64() {
+    // This creates a parquet file of 1 column "decimal64_col" with decimal 
data type and precicion 9, scale 2
+    // file has 3 record batches, each has 5 rows. They will be saved into 3 
row groups
+    let reader = TestReader {
+        scenario: Scenario::Decimal64,
+        row_per_group: 5,
+    }
+    .build()
+    .await;
+
+    Test {
+        reader: &reader,
+        expected_min: Arc::new(
+            Decimal64Array::from(vec![100, -500, 2000])
+                .with_precision_and_scale(9, 2)
+                .unwrap(),
+        ),
+        expected_max: Arc::new(
+            Decimal64Array::from(vec![600, 600, 6000])
+                .with_precision_and_scale(9, 2)
+                .unwrap(),
+        ),
+        expected_null_counts: UInt64Array::from(vec![0, 0, 0]),
+        expected_row_counts: Some(UInt64Array::from(vec![5, 5, 5])),
+        // stats are exact
+        expected_max_value_exact: BooleanArray::from(vec![true, true, true]),
+        expected_min_value_exact: BooleanArray::from(vec![true, true, true]),
+        column_name: "decimal64_col",
+        check: Check::Both,
+    }
+    .run();
+}
+#[tokio::test]
+async fn test_decimal128() {
+    // This creates a parquet file of 1 column "decimal128_col" with decimal 
data type and precicion 9, scale 2

Review Comment:
   ```suggestion
       // This creates a parquet file of 1 column "decimal128_col" with decimal 
data type and precision 9, scale 2
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: github-unsubscribe@arrow.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org

Reply via email to