friendlymatthew commented on code in PR #7921:
URL: https://github.com/apache/arrow-rs/pull/7921#discussion_r2217257713


##########
parquet-variant-compute/src/shredding.rs:
##########
@@ -0,0 +1,364 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+use arrow_schema::{ArrowError, DataType, Fields};
+
+// Keywords defined by the shredding spec
+pub const METADATA: &str = "metadata";
+pub const VALUE: &str = "value";
+pub const TYPED_VALUE: &str = "typed_value";
+
+pub fn validate_value_and_typed_value(
+    fields: &Fields,
+    allow_both_null: bool,
+) -> Result<(), ArrowError> {
+    let value_field_res = fields.iter().find(|f| f.name() == VALUE);
+    let typed_value_field_res = fields.iter().find(|f| f.name() == 
TYPED_VALUE);
+
+    if !allow_both_null {
+        if let (None, None) = (value_field_res, typed_value_field_res) {
+            return Err(ArrowError::InvalidArgumentError(
+                "Invalid VariantArray: StructArray must contain either `value` 
or `typed_value` fields or both.".to_string()
+            ));
+        }
+    }
+
+    if let Some(value_field) = value_field_res {
+        // if !value_field.is_nullable() {
+        //     return Err(ArrowError::InvalidArgumentError(
+        //         "Expected value field to be nullable".to_string(),
+        //     ));
+        // }
+
+        if value_field.data_type() != &DataType::BinaryView {
+            return Err(ArrowError::NotYetImplemented(format!(
+                "VariantArray 'value' field must be BinaryView, got {}",
+                value_field.data_type()
+            )));
+        }
+    }
+
+    if let Some(typed_value_field) = fields.iter().find(|f| f.name() == 
TYPED_VALUE) {
+        // if !typed_value_field.is_nullable() {
+        //     return Err(ArrowError::InvalidArgumentError(
+        //         "Expected value field to be nullable".to_string(),
+        //     ));
+        // }
+
+        // this is directly mapped from the spec's parquet physical types
+        // note, there are more data types we can support
+        // but for the sake of simplicity, I chose the smallest subset
+        match typed_value_field.data_type() {
+            DataType::Boolean
+            | DataType::Int32
+            | DataType::Int64
+            | DataType::Float32
+            | DataType::Float64
+            | DataType::BinaryView => {}
+            DataType::Union(union_fields, _) => {
+                union_fields
+                    .iter()
+                    .map(|(_, f)| f.clone())
+                    .try_for_each(|f| {
+                        let DataType::Struct(fields) = f.data_type().clone() 
else {
+                            return Err(ArrowError::InvalidArgumentError(
+                                "Expected struct".to_string(),
+                            ));
+                        };
+
+                        validate_value_and_typed_value(&fields, false)
+                    })?;
+            }
+            DataType::Dictionary(key, value) => {
+                if key.as_ref() != &DataType::Utf8View {
+                    return Err(ArrowError::NotYetImplemented(format!(
+                        "Unsupported type. Expected dictionary key to be 
Utf8View, got {key}"
+                    )));
+                }
+
+                if let DataType::Struct(fields) = value.as_ref() {
+                    validate_value_and_typed_value(fields, true)?;
+                } else {
+                    return Err(ArrowError::NotYetImplemented(format!(
+                        "Unsupported type. Expected dictionary values to be 
Utf8View, got {value}"
+                    )));
+                }
+            }
+            DataType::Struct(fields) => validate_value_and_typed_value(fields, 
false)?, // the ide
+            foreign => {
+                return Err(ArrowError::NotYetImplemented(format!(
+                    "Unsupported VariantArray 'typed_value' field, got 
{foreign}"
+                )))
+            }
+        }
+    }
+
+    Ok(())
+}
+
+/// Validates that the provided [`Fields`] conform to the Variant shredding 
specification.
+///
+/// # Requirements
+/// - Must contain a "metadata" field of type BinaryView
+/// - Must contain at least one of "value" (optional BinaryView) or 
"typed_value" (optional with valid Parquet type)
+/// - Both "value" and "typed_value" can only be null simultaneously for 
shredded object fields
+pub fn validate_shredded_schema(fields: &Fields) -> Result<(), ArrowError> {
+    let metadata_field = fields
+        .iter()
+        .find(|f| f.name() == METADATA)
+        .ok_or_else(|| {
+            ArrowError::InvalidArgumentError(
+                "Invalid VariantArray: StructArray must contain a 'metadata' 
field".to_string(),
+            )
+        })?;
+
+    if metadata_field.is_nullable() {
+        return Err(ArrowError::InvalidArgumentError(
+            "Invalid VariantArray: metadata field can not be 
nullable".to_string(),
+        ));
+    }

Review Comment:
   Yes, that is how I understood it as well. 
   
   `validate_shredded_schema` should be called at the top level; if nested 
schemas exist, we recursively call `validate_value_and_typed_value`
   
   This way, we only validate the metadata column once and at the top level



##########
parquet-variant-compute/src/shredding.rs:
##########
@@ -0,0 +1,364 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+use arrow_schema::{ArrowError, DataType, Fields};
+
+// Keywords defined by the shredding spec
+pub const METADATA: &str = "metadata";
+pub const VALUE: &str = "value";
+pub const TYPED_VALUE: &str = "typed_value";
+
+pub fn validate_value_and_typed_value(
+    fields: &Fields,
+    allow_both_null: bool,
+) -> Result<(), ArrowError> {
+    let value_field_res = fields.iter().find(|f| f.name() == VALUE);
+    let typed_value_field_res = fields.iter().find(|f| f.name() == 
TYPED_VALUE);
+
+    if !allow_both_null {
+        if let (None, None) = (value_field_res, typed_value_field_res) {
+            return Err(ArrowError::InvalidArgumentError(
+                "Invalid VariantArray: StructArray must contain either `value` 
or `typed_value` fields or both.".to_string()
+            ));
+        }
+    }
+
+    if let Some(value_field) = value_field_res {
+        // if !value_field.is_nullable() {
+        //     return Err(ArrowError::InvalidArgumentError(
+        //         "Expected value field to be nullable".to_string(),
+        //     ));
+        // }
+
+        if value_field.data_type() != &DataType::BinaryView {
+            return Err(ArrowError::NotYetImplemented(format!(
+                "VariantArray 'value' field must be BinaryView, got {}",
+                value_field.data_type()
+            )));
+        }
+    }
+
+    if let Some(typed_value_field) = fields.iter().find(|f| f.name() == 
TYPED_VALUE) {
+        // if !typed_value_field.is_nullable() {
+        //     return Err(ArrowError::InvalidArgumentError(
+        //         "Expected value field to be nullable".to_string(),
+        //     ));
+        // }
+
+        // this is directly mapped from the spec's parquet physical types
+        // note, there are more data types we can support
+        // but for the sake of simplicity, I chose the smallest subset
+        match typed_value_field.data_type() {
+            DataType::Boolean
+            | DataType::Int32
+            | DataType::Int64
+            | DataType::Float32
+            | DataType::Float64
+            | DataType::BinaryView => {}
+            DataType::Union(union_fields, _) => {
+                union_fields
+                    .iter()
+                    .map(|(_, f)| f.clone())
+                    .try_for_each(|f| {
+                        let DataType::Struct(fields) = f.data_type().clone() 
else {
+                            return Err(ArrowError::InvalidArgumentError(
+                                "Expected struct".to_string(),
+                            ));
+                        };
+
+                        validate_value_and_typed_value(&fields, false)
+                    })?;
+            }
+            DataType::Dictionary(key, value) => {
+                if key.as_ref() != &DataType::Utf8View {
+                    return Err(ArrowError::NotYetImplemented(format!(
+                        "Unsupported type. Expected dictionary key to be 
Utf8View, got {key}"
+                    )));
+                }
+
+                if let DataType::Struct(fields) = value.as_ref() {
+                    validate_value_and_typed_value(fields, true)?;
+                } else {
+                    return Err(ArrowError::NotYetImplemented(format!(
+                        "Unsupported type. Expected dictionary values to be 
Utf8View, got {value}"
+                    )));
+                }
+            }
+            DataType::Struct(fields) => validate_value_and_typed_value(fields, 
false)?, // the ide
+            foreign => {
+                return Err(ArrowError::NotYetImplemented(format!(
+                    "Unsupported VariantArray 'typed_value' field, got 
{foreign}"
+                )))
+            }
+        }
+    }
+
+    Ok(())
+}
+
+/// Validates that the provided [`Fields`] conform to the Variant shredding 
specification.
+///
+/// # Requirements
+/// - Must contain a "metadata" field of type BinaryView
+/// - Must contain at least one of "value" (optional BinaryView) or 
"typed_value" (optional with valid Parquet type)
+/// - Both "value" and "typed_value" can only be null simultaneously for 
shredded object fields
+pub fn validate_shredded_schema(fields: &Fields) -> Result<(), ArrowError> {
+    let metadata_field = fields
+        .iter()
+        .find(|f| f.name() == METADATA)
+        .ok_or_else(|| {
+            ArrowError::InvalidArgumentError(
+                "Invalid VariantArray: StructArray must contain a 'metadata' 
field".to_string(),
+            )
+        })?;
+
+    if metadata_field.is_nullable() {
+        return Err(ArrowError::InvalidArgumentError(
+            "Invalid VariantArray: metadata field can not be 
nullable".to_string(),
+        ));
+    }

Review Comment:
   Yes, that is how I understood it as well. 
   
   `validate_shredded_schema` is called at the top level; if nested schemas 
exist, we recursively call `validate_value_and_typed_value`
   
   This way, we only validate the metadata column once and at the top level



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: github-unsubscr...@arrow.apache.org

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

Reply via email to