cloud-fan commented on a change in pull request #31349: URL: https://github.com/apache/spark/pull/31349#discussion_r565391447
########## File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala ########## @@ -31,126 +31,20 @@ import org.apache.spark.sql.catalyst.rules.Rule import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types._ +trait TypeCoercionBase { + def typeCoercionRules: List[Rule[LogicalPlan]] -/** - * A collection of [[Rule]] that can be used to coerce differing types that participate in - * operations into compatible ones. - * - * Notes about type widening / tightest common types: Broadly, there are two cases when we need - * to widen data types (e.g. union, binary comparison). In case 1, we are looking for a common - * data type for two or more data types, and in this case no loss of precision is allowed. Examples - * include type inference in JSON (e.g. what's the column's data type if one row is an integer - * while the other row is a long?). In case 2, we are looking for a widened data type with - * some acceptable loss of precision (e.g. there is no common type for double and decimal because - * double's range is larger than decimal, and yet decimal is more precise than double, but in - * union we would cast the decimal into double). - */ -object TypeCoercion { - - def typeCoercionRules: List[Rule[LogicalPlan]] = - InConversion :: - WidenSetOperationTypes :: - PromoteStrings :: - DecimalPrecision :: - BooleanEquality :: - FunctionArgumentConversion :: - ConcatCoercion :: - MapZipWithCoercion :: - EltCoercion :: - CaseWhenCoercion :: - IfCoercion :: - StackCoercion :: - Division :: - IntegralDivision :: - ImplicitTypeCasts :: - DateTimeOperations :: - WindowFrameCoercion :: - StringLiteralCoercion :: - Nil - - // See https://cwiki.apache.org/confluence/display/Hive/LanguageManual+Types. 
- // The conversion for integral and floating point types have a linear widening hierarchy: - val numericPrecedence = - IndexedSeq( - ByteType, - ShortType, - IntegerType, - LongType, - FloatType, - DoubleType) + def findTightestCommonType(type1: DataType, type2: DataType): Option[DataType] Review comment: we should document it ---------------------------------------------------------------- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org