This is an automated email from the ASF dual-hosted git repository.

joshrosen pushed a commit to branch branch-2.4
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-2.4 by this push:
     new a71e90a  [SPARK-26038][BRANCH-2.4] Decimal 
toScalaBigInt/toJavaBigInteger for decimals not fitting in long
a71e90a is described below

commit a71e90a76a982dde09d3b60bb2cf4548c62f57a1
Author: Juliusz Sompolski <ju...@databricks.com>
AuthorDate: Fri Jun 21 07:56:49 2019 -0700

    [SPARK-26038][BRANCH-2.4] Decimal toScalaBigInt/toJavaBigInteger for 
decimals not fitting in long
    
    This is a Spark 2.4.x backport of #23022. Original description follows 
below:
    
    ## What changes were proposed in this pull request?
    
    Fix Decimal `toScalaBigInt` and `toJavaBigInteger`, which used to work only 
for decimals fitting in a long, so that they also handle decimals not fitting 
in a long.
    
    ## How was this patch tested?
    
    Added test to DecimalSuite.
    
    Closes #24928 from JoshRosen/joshrosen/SPARK-26038-backport.
    
    Authored-by: Juliusz Sompolski <ju...@databricks.com>
    Signed-off-by: Josh Rosen <rosenvi...@gmail.com>
---
 .../main/scala/org/apache/spark/sql/types/Decimal.scala  | 16 ++++++++++++++--
 .../scala/org/apache/spark/sql/types/DecimalSuite.scala  | 11 +++++++++++
 2 files changed, 25 insertions(+), 2 deletions(-)

diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
index 9eed2eb..12182324 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/types/Decimal.scala
@@ -185,9 +185,21 @@ final class Decimal extends Ordered[Decimal] with 
Serializable {
     }
   }
 
-  def toScalaBigInt: BigInt = BigInt(toLong)
+  def toScalaBigInt: BigInt = {
+    if (decimalVal.ne(null)) {
+      decimalVal.toBigInt()
+    } else {
+      BigInt(toLong)
+    }
+  }
 
-  def toJavaBigInteger: java.math.BigInteger = 
java.math.BigInteger.valueOf(toLong)
+  def toJavaBigInteger: java.math.BigInteger = {
+    if (decimalVal.ne(null)) {
+      decimalVal.underlying().toBigInteger()
+    } else {
+      java.math.BigInteger.valueOf(toLong)
+    }
+  }
 
   def toUnscaledLong: Long = {
     if (decimalVal.ne(null)) {
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
index 10de90c..8abd762 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/types/DecimalSuite.scala
@@ -228,4 +228,15 @@ class DecimalSuite extends SparkFunSuite with 
PrivateMethodTester {
     val decimal = Decimal.apply(bigInt)
     assert(decimal.toJavaBigDecimal.unscaledValue.toString === 
"9223372036854775808")
   }
+
+  test("SPARK-26038: toScalaBigInt/toJavaBigInteger") {
+    // not fitting long
+    val decimal = 
Decimal("1234568790123456789012348790.1234879012345678901234568790")
+    assert(decimal.toScalaBigInt == 
scala.math.BigInt("1234568790123456789012348790"))
+    assert(decimal.toJavaBigInteger == new 
java.math.BigInteger("1234568790123456789012348790"))
+    // fitting long
+    val decimalLong = Decimal(123456789123456789L, 18, 9)
+    assert(decimalLong.toScalaBigInt == scala.math.BigInt("123456789"))
+    assert(decimalLong.toJavaBigInteger == new 
java.math.BigInteger("123456789"))
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to