AngersZhuuuu commented on a change in pull request #32314: URL: https://github.com/apache/spark/pull/32314#discussion_r619202058
########## File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala ########## @@ -402,6 +403,19 @@ case class DivideYMInterval( override def inputTypes: Seq[AbstractDataType] = Seq(YearMonthIntervalType, NumericType) override def dataType: DataType = YearMonthIntervalType + def checkDivideOverflow(month: Int, num: Any): Unit = { + if (month == Int.MinValue) { + num match { + case l: Long if l == -1L => throw QueryExecutionErrors.overflowInIntegralDivideError() + case number: Number if number.doubleValue() == -1.0D => + throw QueryExecutionErrors.overflowInIntegralDivideError() + case decimal: Decimal if decimal.equals(Decimal.apply(-1)) => Review comment: But here we will get an int value, so it should still overflow ########## File path: sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/intervalExpressions.scala ########## @@ -402,6 +403,19 @@ case class DivideYMInterval( override def inputTypes: Seq[AbstractDataType] = Seq(YearMonthIntervalType, NumericType) override def dataType: DataType = YearMonthIntervalType + def checkDivideOverflow(month: Int, num: Any): Unit = { + if (month == Int.MinValue) { + num match { + case l: Long if l == -1L => throw QueryExecutionErrors.overflowInIntegralDivideError() + case number: Number if number.doubleValue() == -1.0D => + throw QueryExecutionErrors.overflowInIntegralDivideError() + case decimal: Decimal if decimal.equals(Decimal.apply(-1)) => Review comment: Gentle ping @cloud-fan @maropu @MaxGekk @beliefer @wangyum Hope to get more suggestions from you guys about the behavior here -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org