spark git commit: [SPARK-20665][SQL] "Bround" and "Round" function return NULL
Repository: spark Updated Branches: refs/heads/branch-2.0 d86dae8fe -> b2d0ed287 [SPARK-20665][SQL] Bround" and "Round" function return NULL spark-sql>select bround(12.3, 2); spark-sql>NULL For this case, the expected result is 12.3, but it is null. So ,when the second parameter is bigger than "decimal.scala", the result is not we expected. "round" function has the same problem. This PR can solve the problem for both of them. unit test cases in MathExpressionsSuite and MathFunctionsSuite Author: liuxianCloses #17906 from 10110346/wip_lx_0509. (cherry picked from commit 2b36eb696f6c738e1328582630755aaac4293460) Signed-off-by: Wenchen Fan Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b2d0ed28 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b2d0ed28 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b2d0ed28 Branch: refs/heads/branch-2.0 Commit: b2d0ed2875fcc90a3ac70e857eb42bce9055e6d6 Parents: d86dae8 Author: liuxian Authored: Fri May 12 11:38:50 2017 +0800 Committer: Wenchen Fan Committed: Fri May 12 11:43:21 2017 +0800 -- .../sql/catalyst/expressions/mathExpressions.scala | 12 ++-- .../sql/catalyst/expressions/MathFunctionsSuite.scala | 7 +++ .../org/apache/spark/sql/MathExpressionsSuite.scala| 13 + 3 files changed, 22 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/b2d0ed28/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala -- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala index 591e1e5..c7dfeb7 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala @@ -845,10 +845,10 @@ abstract class RoundBase(child: 
Expression, scale: Expression, // not overriding since _scale is a constant int at runtime def nullSafeEval(input1: Any): Any = { -child.dataType match { - case _: DecimalType => +dataType match { + case DecimalType.Fixed(_, s) => val decimal = input1.asInstanceOf[Decimal] -if (decimal.changePrecision(decimal.precision, _scale, mode)) decimal else null +if (decimal.changePrecision(decimal.precision, s, mode)) decimal else null case ByteType => BigDecimal(input1.asInstanceOf[Byte]).setScale(_scale, mode).toByte case ShortType => @@ -877,10 +877,10 @@ abstract class RoundBase(child: Expression, scale: Expression, override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val ce = child.genCode(ctx) -val evaluationCode = child.dataType match { - case _: DecimalType => +val evaluationCode = dataType match { + case DecimalType.Fixed(_, s) => s""" -if (${ce.value}.changePrecision(${ce.value}.precision(), ${_scale}, +if (${ce.value}.changePrecision(${ce.value}.precision(), ${s}, java.math.BigDecimal.${modeStr})) { ${ev.value} = ${ce.value}; } else { http://git-wip-us.apache.org/repos/asf/spark/blob/b2d0ed28/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala -- diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala index f88c9e8..a08db2f 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathFunctionsSuite.scala @@ -546,15 +546,14 @@ class MathFunctionsSuite extends SparkFunSuite with ExpressionEvalHelper { val bdResults: Seq[BigDecimal] = Seq(BigDecimal(3.0), BigDecimal(3.1), BigDecimal(3.14), BigDecimal(3.142), BigDecimal(3.1416), BigDecimal(3.14159), BigDecimal(3.141593), BigDecimal(3.1415927)) -// round_scale > current_scale would 
result in precision increase -// and not allowed by o.a.s.s.types.Decimal.changePrecision, therefore null + (0 to 7).foreach { i => checkEvaluation(Round(bdPi, i), bdResults(i), EmptyRow) checkEvaluation(BRound(bdPi, i), bdResults(i), EmptyRow) } (8 to 10).foreach { scale => - checkEvaluation(Round(bdPi, scale), null, EmptyRow) -
spark git commit: [SPARK-20665][SQL] "Bround" and "Round" function return NULL
Repository: spark Updated Branches: refs/heads/branch-2.1 92a71a667 -> 6e89d5740 [SPARK-20665][SQL] Bround" and "Round" function return NULL spark-sql>select bround(12.3, 2); spark-sql>NULL For this case, the expected result is 12.3, but it is null. So ,when the second parameter is bigger than "decimal.scala", the result is not we expected. "round" function has the same problem. This PR can solve the problem for both of them. unit test cases in MathExpressionsSuite and MathFunctionsSuite Author: liuxianCloses #17906 from 10110346/wip_lx_0509. (cherry picked from commit 2b36eb696f6c738e1328582630755aaac4293460) Signed-off-by: Wenchen Fan Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/6e89d574 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/6e89d574 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/6e89d574 Branch: refs/heads/branch-2.1 Commit: 6e89d574058bc2b96b14a691a07580be67f63707 Parents: 92a71a6 Author: liuxian Authored: Fri May 12 11:38:50 2017 +0800 Committer: Wenchen Fan Committed: Fri May 12 11:41:40 2017 +0800 -- .../sql/catalyst/expressions/mathExpressions.scala | 12 ++-- .../catalyst/expressions/MathExpressionsSuite.scala| 7 +++ .../org/apache/spark/sql/MathFunctionsSuite.scala | 13 + 3 files changed, 22 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/6e89d574/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala -- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala index 65273a7..54b8457 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala @@ -1021,10 +1021,10 @@ abstract class RoundBase(child: 
Expression, scale: Expression, // not overriding since _scale is a constant int at runtime def nullSafeEval(input1: Any): Any = { -child.dataType match { - case _: DecimalType => +dataType match { + case DecimalType.Fixed(_, s) => val decimal = input1.asInstanceOf[Decimal] -if (decimal.changePrecision(decimal.precision, _scale, mode)) decimal else null +if (decimal.changePrecision(decimal.precision, s, mode)) decimal else null case ByteType => BigDecimal(input1.asInstanceOf[Byte]).setScale(_scale, mode).toByte case ShortType => @@ -1053,10 +1053,10 @@ abstract class RoundBase(child: Expression, scale: Expression, override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val ce = child.genCode(ctx) -val evaluationCode = child.dataType match { - case _: DecimalType => +val evaluationCode = dataType match { + case DecimalType.Fixed(_, s) => s""" -if (${ce.value}.changePrecision(${ce.value}.precision(), ${_scale}, +if (${ce.value}.changePrecision(${ce.value}.precision(), ${s}, java.math.BigDecimal.${modeStr})) { ${ev.value} = ${ce.value}; } else { http://git-wip-us.apache.org/repos/asf/spark/blob/6e89d574/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala -- diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala index 6b5bfac..1555dd1 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala @@ -546,15 +546,14 @@ class MathExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { val bdResults: Seq[BigDecimal] = Seq(BigDecimal(3.0), BigDecimal(3.1), BigDecimal(3.14), BigDecimal(3.142), BigDecimal(3.1416), BigDecimal(3.14159), BigDecimal(3.141593), BigDecimal(3.1415927)) -// round_scale > 
current_scale would result in precision increase -// and not allowed by o.a.s.s.types.Decimal.changePrecision, therefore null + (0 to 7).foreach { i => checkEvaluation(Round(bdPi, i), bdResults(i), EmptyRow) checkEvaluation(BRound(bdPi, i), bdResults(i), EmptyRow) } (8 to 10).foreach { scale => - checkEvaluation(Round(bdPi, scale), null,
spark git commit: [SPARK-20665][SQL] "Bround" and "Round" function return NULL
Repository: spark Updated Branches: refs/heads/branch-2.2 3d1908fd5 -> 2cac317a8 [SPARK-20665][SQL] Bround" and "Round" function return NULL ## What changes were proposed in this pull request? spark-sql>select bround(12.3, 2); spark-sql>NULL For this case, the expected result is 12.3, but it is null. So ,when the second parameter is bigger than "decimal.scala", the result is not we expected. "round" function has the same problem. This PR can solve the problem for both of them. ## How was this patch tested? unit test cases in MathExpressionsSuite and MathFunctionsSuite Author: liuxianCloses #17906 from 10110346/wip_lx_0509. (cherry picked from commit 2b36eb696f6c738e1328582630755aaac4293460) Signed-off-by: Wenchen Fan Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2cac317a Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2cac317a Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2cac317a Branch: refs/heads/branch-2.2 Commit: 2cac317a84a234f034b0c75dcb5e4c27860a4cc0 Parents: 3d1908f Author: liuxian Authored: Fri May 12 11:38:50 2017 +0800 Committer: Wenchen Fan Committed: Fri May 12 11:39:02 2017 +0800 -- .../sql/catalyst/expressions/mathExpressions.scala | 12 ++-- .../catalyst/expressions/MathExpressionsSuite.scala| 7 +++ .../org/apache/spark/sql/MathFunctionsSuite.scala | 13 + 3 files changed, 22 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/2cac317a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala -- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala index c4d47ab..de1a46d 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala +++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala @@ -1023,10 +1023,10 @@ abstract class RoundBase(child: Expression, scale: Expression, // not overriding since _scale is a constant int at runtime def nullSafeEval(input1: Any): Any = { -child.dataType match { - case _: DecimalType => +dataType match { + case DecimalType.Fixed(_, s) => val decimal = input1.asInstanceOf[Decimal] -decimal.toPrecision(decimal.precision, _scale, mode).orNull +decimal.toPrecision(decimal.precision, s, mode).orNull case ByteType => BigDecimal(input1.asInstanceOf[Byte]).setScale(_scale, mode).toByte case ShortType => @@ -1055,10 +1055,10 @@ abstract class RoundBase(child: Expression, scale: Expression, override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val ce = child.genCode(ctx) -val evaluationCode = child.dataType match { - case _: DecimalType => +val evaluationCode = dataType match { + case DecimalType.Fixed(_, s) => s""" -if (${ce.value}.changePrecision(${ce.value}.precision(), ${_scale}, +if (${ce.value}.changePrecision(${ce.value}.precision(), ${s}, java.math.BigDecimal.${modeStr})) { ${ev.value} = ${ce.value}; } else { http://git-wip-us.apache.org/repos/asf/spark/blob/2cac317a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala -- diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala index 6b5bfac..1555dd1 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala @@ -546,15 +546,14 @@ class MathExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { val bdResults: Seq[BigDecimal] = Seq(BigDecimal(3.0), BigDecimal(3.1), BigDecimal(3.14), BigDecimal(3.142), 
BigDecimal(3.1416), BigDecimal(3.14159), BigDecimal(3.141593), BigDecimal(3.1415927)) -// round_scale > current_scale would result in precision increase -// and not allowed by o.a.s.s.types.Decimal.changePrecision, therefore null + (0 to 7).foreach { i => checkEvaluation(Round(bdPi, i), bdResults(i), EmptyRow) checkEvaluation(BRound(bdPi, i), bdResults(i), EmptyRow) } (8 to 10).foreach { scale => -
spark git commit: [SPARK-20665][SQL] "Bround" and "Round" function return NULL
Repository: spark Updated Branches: refs/heads/master 609ba5f2b -> 2b36eb696 [SPARK-20665][SQL] Bround" and "Round" function return NULL ## What changes were proposed in this pull request? spark-sql>select bround(12.3, 2); spark-sql>NULL For this case, the expected result is 12.3, but it is null. So ,when the second parameter is bigger than "decimal.scala", the result is not we expected. "round" function has the same problem. This PR can solve the problem for both of them. ## How was this patch tested? unit test cases in MathExpressionsSuite and MathFunctionsSuite Author: liuxianCloses #17906 from 10110346/wip_lx_0509. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2b36eb69 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2b36eb69 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2b36eb69 Branch: refs/heads/master Commit: 2b36eb696f6c738e1328582630755aaac4293460 Parents: 609ba5f Author: liuxian Authored: Fri May 12 11:38:50 2017 +0800 Committer: Wenchen Fan Committed: Fri May 12 11:38:50 2017 +0800 -- .../sql/catalyst/expressions/mathExpressions.scala | 12 ++-- .../catalyst/expressions/MathExpressionsSuite.scala| 7 +++ .../org/apache/spark/sql/MathFunctionsSuite.scala | 13 + 3 files changed, 22 insertions(+), 10 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/2b36eb69/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala -- diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala index c4d47ab..de1a46d 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala @@ -1023,10 +1023,10 @@ abstract class RoundBase(child: Expression, scale: Expression, 
// not overriding since _scale is a constant int at runtime def nullSafeEval(input1: Any): Any = { -child.dataType match { - case _: DecimalType => +dataType match { + case DecimalType.Fixed(_, s) => val decimal = input1.asInstanceOf[Decimal] -decimal.toPrecision(decimal.precision, _scale, mode).orNull +decimal.toPrecision(decimal.precision, s, mode).orNull case ByteType => BigDecimal(input1.asInstanceOf[Byte]).setScale(_scale, mode).toByte case ShortType => @@ -1055,10 +1055,10 @@ abstract class RoundBase(child: Expression, scale: Expression, override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = { val ce = child.genCode(ctx) -val evaluationCode = child.dataType match { - case _: DecimalType => +val evaluationCode = dataType match { + case DecimalType.Fixed(_, s) => s""" -if (${ce.value}.changePrecision(${ce.value}.precision(), ${_scale}, +if (${ce.value}.changePrecision(${ce.value}.precision(), ${s}, java.math.BigDecimal.${modeStr})) { ${ev.value} = ${ce.value}; } else { http://git-wip-us.apache.org/repos/asf/spark/blob/2b36eb69/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala -- diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala index 6b5bfac..1555dd1 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/MathExpressionsSuite.scala @@ -546,15 +546,14 @@ class MathExpressionsSuite extends SparkFunSuite with ExpressionEvalHelper { val bdResults: Seq[BigDecimal] = Seq(BigDecimal(3.0), BigDecimal(3.1), BigDecimal(3.14), BigDecimal(3.142), BigDecimal(3.1416), BigDecimal(3.14159), BigDecimal(3.141593), BigDecimal(3.1415927)) -// round_scale > current_scale would result in precision increase -// and not allowed by 
o.a.s.s.types.Decimal.changePrecision, therefore null + (0 to 7).foreach { i => checkEvaluation(Round(bdPi, i), bdResults(i), EmptyRow) checkEvaluation(BRound(bdPi, i), bdResults(i), EmptyRow) } (8 to 10).foreach { scale => - checkEvaluation(Round(bdPi, scale), null, EmptyRow) - checkEvaluation(BRound(bdPi, scale), null, EmptyRow) +