coderfender commented on code in PR #3559:
URL: https://github.com/apache/datafusion-comet/pull/3559#discussion_r2886173686
##########
native/spark-expr/src/conversion_funcs/numeric.rs:
##########
@@ -75,6 +75,56 @@ pub(crate) fn is_df_cast_from_decimal_spark_compatible(to_type: &DataType) -> bool
)
}
+macro_rules! cast_float_to_timestamp_impl {
+ ($array:expr, $builder:expr, $primitive_type:ty, $eval_mode:expr) => {{
+ let arr = $array.as_primitive::<$primitive_type>();
+ for i in 0..arr.len() {
+ if arr.is_null(i) {
+ $builder.append_null();
+ } else {
+ let val = arr.value(i) as f64;
+ // Path 1: NaN/Infinity check - error says TIMESTAMP
+ if val.is_nan() || val.is_infinite() {
+ if $eval_mode == EvalMode::Ansi {
+ return Err(SparkError::CastInvalidValue {
+ value: val.to_string(),
+ from_type: "DOUBLE".to_string(),
+ to_type: "TIMESTAMP".to_string(),
+ });
+ }
+ $builder.append_null();
+ } else {
+ // Path 2: Multiply then check overflow - error says BIGINT
+ let micros = val * MICROS_PER_SECOND as f64;
+ if micros.floor() <= i64::MAX as f64 && micros.ceil() >= i64::MIN as f64 {
+ $builder.append_value(micros as i64);
+ } else {
+ if $eval_mode == EvalMode::Ansi {
+ let value_str = if micros.is_infinite() {
+ if micros.is_sign_positive() {
+ "Infinity".to_string()
+ } else {
+ "-Infinity".to_string()
+ }
+ } else if micros.is_nan() {
+ "NaN".to_string()
+ } else {
+ format!("{:e}", micros).to_uppercase() + "D"
+ };
+ return Err(SparkError::CastOverFlow {
+ value: value_str,
+ from_type: "DOUBLE".to_string(),
+ to_type: "BIGINT".to_string(),
+ });
+ }
+ $builder.append_null();
+ }
+ }
+ }
+ }
+ }};
Review Comment:
Spark's float-cast error messages differ here: Spark composes the error message based on the actual input values, so the fixed strings used in this macro may not match.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]