alamb commented on code in PR #7884:
URL: https://github.com/apache/arrow-rs/pull/7884#discussion_r2198414052


##########
parquet-variant-compute/src/from_json.rs:
##########
@@ -0,0 +1,136 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+use std::sync::Arc;
+
+use arrow::array::{
+    Array, ArrayRef, BinaryBuilder, BooleanBufferBuilder, StringArray, 
StructArray,
+};
+use arrow::buffer::NullBuffer;
+use arrow::datatypes::{DataType, Field};
+use arrow_schema::ArrowError;
+use parquet_variant::{json_to_variant, VariantBuilder};
+
+fn variant_arrow_repr() -> DataType {
+    let metadata_field = Field::new("metadata", DataType::Binary, true);
+    let value_field = Field::new("value", DataType::Binary, true);
+    let fields = vec![metadata_field, value_field];
+    DataType::Struct(fields.into())
+}
+
+pub fn batch_json_string_to_variant(input: &ArrayRef) -> Result<StructArray, 
ArrowError> {
+    let input_string_array = match 
input.as_any().downcast_ref::<StringArray>() {
+        Some(string_array) => Ok(string_array),
+        None => Err(ArrowError::CastError(
+            "Expected reference to StringArray as input".into(),
+        )),
+    }?;
+
+    let mut metadata_builder = BinaryBuilder::new();
+    let mut value_builder = BinaryBuilder::new();
+    let mut validity = BooleanBufferBuilder::new(input.len());
+    for i in 0..input.len() {
+        if input.is_null(i) {
+            metadata_builder.append_null();
+            value_builder.append_null();
+            validity.append(false);
+        } else {
+            let mut vb = VariantBuilder::new();

Review Comment:
   > Oh, so just to be clear the two copies you are referring to are 
metadata_builder.append_value(&metadata); and metadata_builder.finish() right? 
If so, I'll take care of it in this PR itself.
   
   What I was really imagining was updating VariantBuilder so it could take a 
pre-existing buffer (Vec) and append to it, rather than writing into a new 
buffer and then copying that into the output bytes.



##########
parquet-variant-compute/src/from_json.rs:
##########
@@ -0,0 +1,181 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+//! Module for transforming a batch of JSON strings into a batch of Variants 
represented as
+//! STRUCT<metadata: BINARY, value: BINARY>
+
+use std::sync::Arc;
+
+use arrow::array::{Array, ArrayRef, BinaryArray, BooleanBufferBuilder, 
StringArray, StructArray};
+use arrow::buffer::{Buffer, NullBuffer, OffsetBuffer, ScalarBuffer};
+use arrow::datatypes::{DataType, Field};
+use arrow_schema::ArrowError;
+use parquet_variant::{json_to_variant, VariantBuilder};
+
+fn variant_arrow_repr() -> DataType {
+    // The subfields are expected to be non-nullable according to the parquet 
variant spec.
+    let metadata_field = Field::new("metadata", DataType::Binary, false);
+    let value_field = Field::new("value", DataType::Binary, false);
+    let fields = vec![metadata_field, value_field];
+    DataType::Struct(fields.into())
+}
+
+/// Parse a batch of JSON strings into a batch of Variants represented as
+/// STRUCT<metadata: BINARY, value: BINARY> where nulls are preserved. The 
JSON strings in the input
+/// must be valid.
+pub fn batch_json_string_to_variant(input: &ArrayRef) -> Result<StructArray, 
ArrowError> {
+    let input_string_array = match 
input.as_any().downcast_ref::<StringArray>() {
+        Some(string_array) => Ok(string_array),
+        None => Err(ArrowError::CastError(
+            "Expected reference to StringArray as input".into(),
+        )),
+    }?;
+
+    // Zero-copy builders
+    let mut metadata_buffer: Vec<u8> = Vec::with_capacity(input.len() * 128);
+    let mut metadata_offsets: Vec<i32> = Vec::with_capacity(input.len() + 1);
+    let mut metadata_validity = BooleanBufferBuilder::new(input.len());
+    let mut metadata_current_offset: i32 = 0;
+    metadata_offsets.push(metadata_current_offset);
+
+    let mut value_buffer: Vec<u8> = Vec::with_capacity(input.len() * 128);
+    let mut value_offsets: Vec<i32> = Vec::with_capacity(input.len() + 1);
+    let mut value_validity = BooleanBufferBuilder::new(input.len());
+    let mut value_current_offset: i32 = 0;
+    value_offsets.push(value_current_offset);
+
+    let mut validity = BooleanBufferBuilder::new(input.len());
+    for i in 0..input.len() {
+        if input.is_null(i) {
+            // The subfields are expected to be non-nullable according to the 
parquet variant spec.
+            metadata_validity.append(true);
+            value_validity.append(true);
+            metadata_offsets.push(metadata_current_offset);
+            value_offsets.push(value_current_offset);
+            validity.append(false);
+        } else {
+            let mut vb = VariantBuilder::new();
+            json_to_variant(input_string_array.value(i), &mut vb)?;
+            let (metadata, value) = vb.finish();
+            validity.append(true);
+
+            metadata_current_offset += metadata.len() as i32;
+            metadata_buffer.extend(metadata);
+            metadata_offsets.push(metadata_current_offset);
+            metadata_validity.append(true);
+
+            value_current_offset += value.len() as i32;
+            value_buffer.extend(value);
+            value_offsets.push(value_current_offset);
+            value_validity.append(true);
+            println!("{value_current_offset} {metadata_current_offset}");
+        }
+    }
+    let metadata_offsets_buffer = 
OffsetBuffer::new(ScalarBuffer::from(metadata_offsets));
+    let metadata_data_buffer = Buffer::from_vec(metadata_buffer);
+    let metadata_null_buffer = NullBuffer::new(metadata_validity.finish());
+
+    let value_offsets_buffer = 
OffsetBuffer::new(ScalarBuffer::from(value_offsets));
+    let value_data_buffer = Buffer::from_vec(value_buffer);
+    let value_null_buffer = NullBuffer::new(value_validity.finish());
+
+    let metadata_array = BinaryArray::new(
+        metadata_offsets_buffer,
+        metadata_data_buffer,
+        Some(metadata_null_buffer),
+    );
+    let value_array = BinaryArray::new(
+        value_offsets_buffer,
+        value_data_buffer,
+        Some(value_null_buffer),
+    );
+
+    let struct_fields: Vec<ArrayRef> = vec![Arc::new(metadata_array), 
Arc::new(value_array)];
+    let variant_fields = match variant_arrow_repr() {
+        DataType::Struct(fields) => fields,
+        _ => unreachable!("variant_arrow_repr is hard-coded and must match the 
expected schema"),
+    };
+    let null_buffer = NullBuffer::new(validity.finish());
+    Ok(StructArray::new(
+        variant_fields,
+        struct_fields,
+        Some(null_buffer),
+    ))
+}
+
+#[cfg(test)]
+mod test {
+    use crate::batch_json_string_to_variant;
+    use arrow::array::{Array, ArrayRef, BinaryArray, StringArray};
+    use arrow_schema::ArrowError;
+    use parquet_variant::{Variant, VariantBuilder};
+    use std::sync::Arc;
+
+    #[test]
+    fn test_batch_json_string_to_variant() -> Result<(), ArrowError> {
+        let input = StringArray::from(vec![
+            Some("1"),
+            None,
+            Some("{\"a\": 32}"),
+            Some("null"),
+            None,
+        ]);

Review Comment:
   I agree full coverage here is redundant -- maybe we can add a comment that 
says "full json parsing coverage is handled by the tests for `json_to_variant`"



##########
parquet-variant-compute/src/from_json.rs:
##########
@@ -0,0 +1,136 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+use std::sync::Arc;
+
+use arrow::array::{
+    Array, ArrayRef, BinaryBuilder, BooleanBufferBuilder, StringArray, 
StructArray,
+};
+use arrow::buffer::NullBuffer;
+use arrow::datatypes::{DataType, Field};
+use arrow_schema::ArrowError;
+use parquet_variant::{json_to_variant, VariantBuilder};
+
+fn variant_arrow_repr() -> DataType {
+    let metadata_field = Field::new("metadata", DataType::Binary, true);
+    let value_field = Field::new("value", DataType::Binary, true);
+    let fields = vec![metadata_field, value_field];
+    DataType::Struct(fields.into())
+}
+
+pub fn batch_json_string_to_variant(input: &ArrayRef) -> Result<StructArray, 
ArrowError> {

Review Comment:
   Maybe we can call it `cast_json_to_variant` and `cast_variant_to_json` 🤔 



##########
parquet-variant-compute/src/from_json.rs:
##########
@@ -0,0 +1,181 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+//! Module for transforming a batch of JSON strings into a batch of Variants 
represented as
+//! STRUCT<metadata: BINARY, value: BINARY>
+
+use std::sync::Arc;
+
+use arrow::array::{Array, ArrayRef, BinaryArray, BooleanBufferBuilder, 
StringArray, StructArray};
+use arrow::buffer::{Buffer, NullBuffer, OffsetBuffer, ScalarBuffer};
+use arrow::datatypes::{DataType, Field};
+use arrow_schema::ArrowError;
+use parquet_variant::{json_to_variant, VariantBuilder};
+
+fn variant_arrow_repr() -> DataType {
+    // The subfields are expected to be non-nullable according to the parquet 
variant spec.
+    let metadata_field = Field::new("metadata", DataType::Binary, false);
+    let value_field = Field::new("value", DataType::Binary, false);
+    let fields = vec![metadata_field, value_field];
+    DataType::Struct(fields.into())
+}
+
+/// Parse a batch of JSON strings into a batch of Variants represented as
+/// STRUCT<metadata: BINARY, value: BINARY> where nulls are preserved. The 
JSON strings in the input
+/// must be valid.
+pub fn batch_json_string_to_variant(input: &ArrayRef) -> Result<StructArray, 
ArrowError> {
+    let input_string_array = match 
input.as_any().downcast_ref::<StringArray>() {
+        Some(string_array) => Ok(string_array),
+        None => Err(ArrowError::CastError(
+            "Expected reference to StringArray as input".into(),
+        )),
+    }?;
+
+    // Zero-copy builders
+    let mut metadata_buffer: Vec<u8> = Vec::with_capacity(input.len() * 128);
+    let mut metadata_offsets: Vec<i32> = Vec::with_capacity(input.len() + 1);
+    let mut metadata_validity = BooleanBufferBuilder::new(input.len());
+    let mut metadata_current_offset: i32 = 0;
+    metadata_offsets.push(metadata_current_offset);
+
+    let mut value_buffer: Vec<u8> = Vec::with_capacity(input.len() * 128);
+    let mut value_offsets: Vec<i32> = Vec::with_capacity(input.len() + 1);
+    let mut value_validity = BooleanBufferBuilder::new(input.len());
+    let mut value_current_offset: i32 = 0;
+    value_offsets.push(value_current_offset);
+
+    let mut validity = BooleanBufferBuilder::new(input.len());
+    for i in 0..input.len() {
+        if input.is_null(i) {
+            // The subfields are expected to be non-nullable according to the 
parquet variant spec.

Review Comment:
   > But I guess we still have to push something to the offset arrays, to 
maintain proper positioning... so valid-but-empty is probably the best we can 
do?
   
   This offset layout is required by the Arrow spec for StringArrays



##########
parquet-variant-compute/src/from_json.rs:
##########
@@ -0,0 +1,181 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+//! Module for transforming a batch of JSON strings into a batch of Variants 
represented as
+//! STRUCT<metadata: BINARY, value: BINARY>
+
+use std::sync::Arc;
+
+use arrow::array::{Array, ArrayRef, BinaryArray, BooleanBufferBuilder, 
StringArray, StructArray};
+use arrow::buffer::{Buffer, NullBuffer, OffsetBuffer, ScalarBuffer};
+use arrow::datatypes::{DataType, Field};
+use arrow_schema::ArrowError;
+use parquet_variant::{json_to_variant, VariantBuilder};
+
+fn variant_arrow_repr() -> DataType {
+    // The subfields are expected to be non-nullable according to the parquet 
variant spec.
+    let metadata_field = Field::new("metadata", DataType::Binary, false);
+    let value_field = Field::new("value", DataType::Binary, false);
+    let fields = vec![metadata_field, value_field];
+    DataType::Struct(fields.into())
+}
+
+/// Parse a batch of JSON strings into a batch of Variants represented as
+/// STRUCT<metadata: BINARY, value: BINARY> where nulls are preserved. The 
JSON strings in the input
+/// must be valid.
+pub fn batch_json_string_to_variant(input: &ArrayRef) -> Result<StructArray, 
ArrowError> {
+    let input_string_array = match 
input.as_any().downcast_ref::<StringArray>() {
+        Some(string_array) => Ok(string_array),
+        None => Err(ArrowError::CastError(
+            "Expected reference to StringArray as input".into(),
+        )),
+    }?;
+
+    // Zero-copy builders
+    let mut metadata_buffer: Vec<u8> = Vec::with_capacity(input.len() * 128);
+    let mut metadata_offsets: Vec<i32> = Vec::with_capacity(input.len() + 1);
+    let mut metadata_validity = BooleanBufferBuilder::new(input.len());
+    let mut metadata_current_offset: i32 = 0;
+    metadata_offsets.push(metadata_current_offset);
+
+    let mut value_buffer: Vec<u8> = Vec::with_capacity(input.len() * 128);
+    let mut value_offsets: Vec<i32> = Vec::with_capacity(input.len() + 1);
+    let mut value_validity = BooleanBufferBuilder::new(input.len());
+    let mut value_current_offset: i32 = 0;
+    value_offsets.push(value_current_offset);
+
+    let mut validity = BooleanBufferBuilder::new(input.len());
+    for i in 0..input.len() {
+        if input.is_null(i) {
+            // The subfields are expected to be non-nullable according to the 
parquet variant spec.
+            metadata_validity.append(true);
+            value_validity.append(true);
+            metadata_offsets.push(metadata_current_offset);
+            value_offsets.push(value_current_offset);
+            validity.append(false);
+        } else {
+            let mut vb = VariantBuilder::new();
+            json_to_variant(input_string_array.value(i), &mut vb)?;
+            let (metadata, value) = vb.finish();
+            validity.append(true);
+
+            metadata_current_offset += metadata.len() as i32;
+            metadata_buffer.extend(metadata);
+            metadata_offsets.push(metadata_current_offset);
+            metadata_validity.append(true);
+
+            value_current_offset += value.len() as i32;
+            value_buffer.extend(value);
+            value_offsets.push(value_current_offset);
+            value_validity.append(true);
+            println!("{value_current_offset} {metadata_current_offset}");
+        }
+    }
+    let metadata_offsets_buffer = 
OffsetBuffer::new(ScalarBuffer::from(metadata_offsets));
+    let metadata_data_buffer = Buffer::from_vec(metadata_buffer);
+    let metadata_null_buffer = NullBuffer::new(metadata_validity.finish());
+
+    let value_offsets_buffer = 
OffsetBuffer::new(ScalarBuffer::from(value_offsets));
+    let value_data_buffer = Buffer::from_vec(value_buffer);
+    let value_null_buffer = NullBuffer::new(value_validity.finish());
+
+    let metadata_array = BinaryArray::new(
+        metadata_offsets_buffer,
+        metadata_data_buffer,
+        Some(metadata_null_buffer),
+    );
+    let value_array = BinaryArray::new(
+        value_offsets_buffer,
+        value_data_buffer,
+        Some(value_null_buffer),
+    );
+
+    let struct_fields: Vec<ArrayRef> = vec![Arc::new(metadata_array), 
Arc::new(value_array)];
+    let variant_fields = match variant_arrow_repr() {
+        DataType::Struct(fields) => fields,
+        _ => unreachable!("variant_arrow_repr is hard-coded and must match the 
expected schema"),
+    };

Review Comment:
   > Should variant_arrow_repr just return Fields instead of DataType? Then we 
don't have to unpack it.
   
   Yes, please



##########
parquet-variant-compute/src/from_json.rs:
##########
@@ -0,0 +1,181 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements.  See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership.  The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License.  You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+//! Module for transforming a batch of JSON strings into a batch of Variants 
represented as
+//! STRUCT<metadata: BINARY, value: BINARY>
+
+use std::sync::Arc;
+
+use arrow::array::{Array, ArrayRef, BinaryArray, BooleanBufferBuilder, 
StringArray, StructArray};
+use arrow::buffer::{Buffer, NullBuffer, OffsetBuffer, ScalarBuffer};
+use arrow::datatypes::{DataType, Field};
+use arrow_schema::ArrowError;
+use parquet_variant::{json_to_variant, VariantBuilder};
+
+fn variant_arrow_repr() -> DataType {
+    // The subfields are expected to be non-nullable according to the parquet 
variant spec.
+    let metadata_field = Field::new("metadata", DataType::Binary, false);
+    let value_field = Field::new("value", DataType::Binary, false);
+    let fields = vec![metadata_field, value_field];
+    DataType::Struct(fields.into())
+}
+
+/// Parse a batch of JSON strings into a batch of Variants represented as
+/// STRUCT<metadata: BINARY, value: BINARY> where nulls are preserved. The 
JSON strings in the input
+/// must be valid.
+pub fn batch_json_string_to_variant(input: &ArrayRef) -> Result<StructArray, 
ArrowError> {
+    let input_string_array = match 
input.as_any().downcast_ref::<StringArray>() {
+        Some(string_array) => Ok(string_array),
+        None => Err(ArrowError::CastError(
+            "Expected reference to StringArray as input".into(),
+        )),
+    }?;
+
+    // Zero-copy builders
+    let mut metadata_buffer: Vec<u8> = Vec::with_capacity(input.len() * 128);

Review Comment:
   I agree we should allocate the normal vec and let it grow -- if we want to 
pre-allocate I think we should allow the user to pass in the allocation size as 
an argument
   
   Maybe something like
   ```rust
   let variant_array = JsonToVariant::new()
     .with_capacity(...)
     .parse(input_array)
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: github-unsubscribe@arrow.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org

Reply via email to