parthchandra commented on code in PR #1387: URL: https://github.com/apache/datafusion-comet/pull/1387#discussion_r1953778478
########## native/core/src/parquet/parquet_support.rs: ########## @@ -618,174 +143,20 @@ fn cast_array( Dictionary(_, _) => Arc::new(casted_dictionary.clone()), _ => take(casted_dictionary.values().as_ref(), dict_array.keys(), None)?, }; - return Ok(spark_cast_postprocess(casted_result, &from_type, to_type)); + return Ok(casted_result); } _ => array, }; let from_type = array.data_type(); - let eval_mode = parquet_options.eval_mode; - let cast_result = match (from_type, to_type) { - (Utf8, Boolean) => spark_cast_utf8_to_boolean::<i32>(&array, eval_mode), - (LargeUtf8, Boolean) => spark_cast_utf8_to_boolean::<i64>(&array, eval_mode), - (Utf8, Timestamp(_, _)) => { - cast_string_to_timestamp(&array, to_type, eval_mode, &parquet_options.timezone) - } - (Utf8, Date32) => cast_string_to_date(&array, to_type, eval_mode), - (Int64, Int32) - | (Int64, Int16) - | (Int64, Int8) - | (Int32, Int16) - | (Int32, Int8) - | (Int16, Int8) - if eval_mode != EvalMode::Try => - { - spark_cast_int_to_int(&array, eval_mode, from_type, to_type) - } - (Utf8, Int8 | Int16 | Int32 | Int64) => { - cast_string_to_int::<i32>(to_type, &array, eval_mode) - } - (LargeUtf8, Int8 | Int16 | Int32 | Int64) => { - cast_string_to_int::<i64>(to_type, &array, eval_mode) - } - (Float64, Utf8) => spark_cast_float64_to_utf8::<i32>(&array, eval_mode), - (Float64, LargeUtf8) => spark_cast_float64_to_utf8::<i64>(&array, eval_mode), - (Float32, Utf8) => spark_cast_float32_to_utf8::<i32>(&array, eval_mode), - (Float32, LargeUtf8) => spark_cast_float32_to_utf8::<i64>(&array, eval_mode), - (Float32, Decimal128(precision, scale)) => { - cast_float32_to_decimal128(&array, *precision, *scale, eval_mode) - } - (Float64, Decimal128(precision, scale)) => { - cast_float64_to_decimal128(&array, *precision, *scale, eval_mode) - } - (Float32, Int8) - | (Float32, Int16) - | (Float32, Int32) - | (Float32, Int64) - | (Float64, Int8) - | (Float64, Int16) - | (Float64, Int32) - | (Float64, Int64) - | (Decimal128(_, _), Int8) - | 
(Decimal128(_, _), Int16) - | (Decimal128(_, _), Int32) - | (Decimal128(_, _), Int64) - if eval_mode != EvalMode::Try => - { - spark_cast_nonintegral_numeric_to_integral(&array, eval_mode, from_type, to_type) - } - (Struct(_), Utf8) => Ok(casts_struct_to_string(array.as_struct(), parquet_options)?), + match (from_type, to_type) { (Struct(_), Struct(_)) => Ok(cast_struct_to_struct( array.as_struct(), from_type, to_type, parquet_options, )?), - (UInt8 | UInt16 | UInt32 | UInt64, Int8 | Int16 | Int32 | Int64) - if parquet_options.allow_cast_unsigned_ints => - { - Ok(cast_with_options(&array, to_type, &PARQUET_OPTIONS)?) - } - _ if parquet_options.is_adapting_schema - || is_datafusion_spark_compatible( - from_type, - to_type, - parquet_options.allow_incompat, - ) => - { - // use DataFusion cast only when we know that it is compatible with Spark - Ok(cast_with_options(&array, to_type, &PARQUET_OPTIONS)?) - } - _ => { - // we should never reach this code because the Scala code should be checking - // for supported cast operations and falling back to Spark for anything that - // is not yet supported - Err(SparkError::Internal(format!( - "Native cast invoked for unsupported cast from {from_type:?} to {to_type:?}" - ))) - } - }; - Ok(spark_cast_postprocess(cast_result?, from_type, to_type)) -} - -/// Determines if DataFusion supports the given cast in a way that is -/// compatible with Spark -fn is_datafusion_spark_compatible( - from_type: &DataType, - to_type: &DataType, - allow_incompat: bool, -) -> bool { - if from_type == to_type { - return true; - } - match from_type { - DataType::Boolean => matches!( - to_type, - DataType::Int8 - | DataType::Int16 - | DataType::Int32 - | DataType::Int64 - | DataType::Float32 - | DataType::Float64 - | DataType::Utf8 - ), - DataType::Int8 | DataType::Int16 | DataType::Int32 | DataType::Int64 => { - // note that the cast from Int32/Int64 -> Decimal128 here is actually - // not compatible with Spark (no overflow checks) but we have tests 
that - // rely on this cast working so we have to leave it here for now - matches!( - to_type, - DataType::Boolean - | DataType::Int8 - | DataType::Int16 - | DataType::Int32 - | DataType::Int64 - | DataType::Float32 - | DataType::Float64 - | DataType::Decimal128(_, _) - | DataType::Utf8 - ) - } - DataType::Float32 | DataType::Float64 => matches!( - to_type, - DataType::Boolean - | DataType::Int8 - | DataType::Int16 - | DataType::Int32 - | DataType::Int64 - | DataType::Float32 - | DataType::Float64 - ), - DataType::Decimal128(_, _) | DataType::Decimal256(_, _) => matches!( - to_type, - DataType::Int8 - | DataType::Int16 - | DataType::Int32 - | DataType::Int64 - | DataType::Float32 - | DataType::Float64 - | DataType::Decimal128(_, _) - | DataType::Decimal256(_, _) - | DataType::Utf8 // note that there can be formatting differences - ), - DataType::Utf8 if allow_incompat => matches!( - to_type, - DataType::Binary | DataType::Float32 | DataType::Float64 | DataType::Decimal128(_, _) - ), - DataType::Utf8 => matches!(to_type, DataType::Binary), - DataType::Date32 => matches!(to_type, DataType::Utf8), - DataType::Timestamp(_, _) => { - matches!( - to_type, - DataType::Int64 | DataType::Date32 | DataType::Utf8 | DataType::Timestamp(_, _) - ) - } - DataType::Binary => { - // note that this is not completely Spark compatible because - // DataFusion only supports binary data containing valid UTF-8 strings - matches!(to_type, DataType::Utf8) - } - _ => false, + _ => Ok(cast_with_options(&array, to_type, &PARQUET_OPTIONS)?), Review Comment: +1 ########## native/spark-expr/src/utils.rs: ########## @@ -72,9 +72,6 @@ pub fn array_with_timezone( Some(DataType::Timestamp(_, Some(_))) => { timestamp_ntz_to_timestamp(array, timezone.as_str(), Some(timezone.as_str())) } - Some(DataType::Timestamp(_, None)) => { Review Comment: iirc this was introduced to fix cast issues caught by the fuzz tests which are not part of the ci. 
Can we run the fuzz tests again to verify that there are no regressions? Ideally, parquet_support should _not_ depend on any functions in utils.rs. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: github-unsubscribe@datafusion.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: github-unsubscribe@datafusion.apache.org For additional commands, e-mail: github-help@datafusion.apache.org