Repository: spark
Updated Branches:
  refs/heads/master 22d4aae8b -> 2f3c20bbd


[SPARK-19446][SQL] Remove unused findTightestCommonType in TypeCoercion

## What changes were proposed in this pull request?

This PR proposes to

- remove unused `findTightestCommonType` in `TypeCoercion` as suggested in 
https://github.com/apache/spark/pull/16777#discussion_r99283834
- rename `findTightestCommonTypeOfTwo` to `findTightestCommonType`.
- fix comments accordingly

The usage was removed while refactoring/fixing in several JIRAs such as 
SPARK-16714, SPARK-16735 and SPARK-16646

## How was this patch tested?

Existing tests.

Author: hyukjinkwon <gurwls...@gmail.com>

Closes #16786 from HyukjinKwon/SPARK-19446.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2f3c20bb
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2f3c20bb
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2f3c20bb

Branch: refs/heads/master
Commit: 2f3c20bbddd266015d9478c35ce2b37d67e01200
Parents: 22d4aae
Author: hyukjinkwon <gurwls...@gmail.com>
Authored: Fri Feb 3 22:10:17 2017 -0800
Committer: gatorsmile <gatorsm...@gmail.com>
Committed: Fri Feb 3 22:10:17 2017 -0800

----------------------------------------------------------------------
 .../sql/catalyst/analysis/TypeCoercion.scala    | 29 ++++++--------------
 .../catalyst/analysis/TypeCoercionSuite.scala   |  4 +--
 .../datasources/json/InferSchema.scala          |  2 +-
 3 files changed, 12 insertions(+), 23 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/2f3c20bb/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
index 4177c2b..c6242e0 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercion.scala
@@ -79,7 +79,7 @@ object TypeCoercion {
    * with primitive types, because in that case the precision and scale of the 
result depends on
    * the operation. Those rules are implemented in [[DecimalPrecision]].
    */
-  val findTightestCommonTypeOfTwo: (DataType, DataType) => Option[DataType] = {
+  val findTightestCommonType: (DataType, DataType) => Option[DataType] = {
     case (t1, t2) if t1 == t2 => Some(t1)
     case (NullType, t1) => Some(t1)
     case (t1, NullType) => Some(t1)
@@ -103,7 +103,7 @@ object TypeCoercion {
 
   /** Similar to [[findTightestCommonType]], but can promote all the way to 
StringType. */
   def findTightestCommonTypeToString(left: DataType, right: DataType): 
Option[DataType] = {
-    findTightestCommonTypeOfTwo(left, right).orElse((left, right) match {
+    findTightestCommonType(left, right).orElse((left, right) match {
       case (StringType, t2: AtomicType) if t2 != BinaryType && t2 != 
BooleanType => Some(StringType)
       case (t1: AtomicType, StringType) if t1 != BinaryType && t1 != 
BooleanType => Some(StringType)
       case _ => None
@@ -111,21 +111,10 @@ object TypeCoercion {
   }
 
   /**
-   * Find the tightest common type of a set of types by continuously applying
-   * `findTightestCommonTypeOfTwo` on these types.
-   */
-  private def findTightestCommonType(types: Seq[DataType]): Option[DataType] = 
{
-    types.foldLeft[Option[DataType]](Some(NullType))((r, c) => r match {
-      case None => None
-      case Some(d) => findTightestCommonTypeOfTwo(d, c)
-    })
-  }
-
-  /**
    * Case 2 type widening (see the classdoc comment above for TypeCoercion).
    *
-   * i.e. the main difference with [[findTightestCommonTypeOfTwo]] is that 
here we allow some
-   * loss of precision when widening decimal and double.
+   * i.e. the main difference with [[findTightestCommonType]] is that here we 
allow some
+   * loss of precision when widening decimal and double, and promotion to 
string.
    */
   private def findWiderTypeForTwo(t1: DataType, t2: DataType): 
Option[DataType] = (t1, t2) match {
     case (t1: DecimalType, t2: DecimalType) =>
@@ -148,13 +137,13 @@ object TypeCoercion {
   }
 
   /**
-   * Similar to [[findWiderCommonType]], but can't promote to string. This is 
also similar to
-   * [[findTightestCommonType]], but can handle decimal types. If the wider 
decimal type exceeds
-   * system limitation, this rule will truncate the decimal type before return 
it.
+   * Similar to [[findWiderCommonType]] that can handle decimal types, but 
can't promote to
+   * string. If the wider decimal type exceeds system limitation, this rule 
will truncate
+   * the decimal type before return it.
    */
   def findWiderTypeWithoutStringPromotion(types: Seq[DataType]): 
Option[DataType] = {
     types.foldLeft[Option[DataType]](Some(NullType))((r, c) => r match {
-      case Some(d) => findTightestCommonTypeOfTwo(d, c).orElse((d, c) match {
+      case Some(d) => findTightestCommonType(d, c).orElse((d, c) match {
         case (t1: DecimalType, t2: DecimalType) =>
           Some(DecimalPrecision.widerDecimalType(t1, t2))
         case (t: IntegralType, d: DecimalType) =>
@@ -621,7 +610,7 @@ object TypeCoercion {
       case e if !e.childrenResolved => e
 
       case b @ BinaryOperator(left, right) if left.dataType != right.dataType 
=>
-        findTightestCommonTypeOfTwo(left.dataType, right.dataType).map { 
commonType =>
+        findTightestCommonType(left.dataType, right.dataType).map { commonType 
=>
           if (b.inputType.acceptsType(commonType)) {
             // If the expression accepts the tightest common type, cast to 
that.
             val newLeft = if (left.dataType == commonType) left else 
Cast(left, commonType)

http://git-wip-us.apache.org/repos/asf/spark/blob/2f3c20bb/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
index 110bd02..ceb5b53 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/TypeCoercionSuite.scala
@@ -309,11 +309,11 @@ class TypeCoercionSuite extends PlanTest {
 
   test("tightest common bound for types") {
     def widenTest(t1: DataType, t2: DataType, tightestCommon: 
Option[DataType]) {
-      var found = TypeCoercion.findTightestCommonTypeOfTwo(t1, t2)
+      var found = TypeCoercion.findTightestCommonType(t1, t2)
       assert(found == tightestCommon,
         s"Expected $tightestCommon as tightest common type for $t1 and $t2, 
found $found")
       // Test both directions to make sure the widening is symmetric.
-      found = TypeCoercion.findTightestCommonTypeOfTwo(t2, t1)
+      found = TypeCoercion.findTightestCommonType(t2, t1)
       assert(found == tightestCommon,
         s"Expected $tightestCommon as tightest common type for $t2 and $t1, 
found $found")
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/2f3c20bb/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/InferSchema.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/InferSchema.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/InferSchema.scala
index dc8bd81..330d04d 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/InferSchema.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/json/InferSchema.scala
@@ -253,7 +253,7 @@ private[sql] object InferSchema {
    * Returns the most general data type for two given data types.
    */
   def compatibleType(t1: DataType, t2: DataType): DataType = {
-    TypeCoercion.findTightestCommonTypeOfTwo(t1, t2).getOrElse {
+    TypeCoercion.findTightestCommonType(t1, t2).getOrElse {
       // t1 or t2 is a StructType, ArrayType, or an unexpected type.
       (t1, t2) match {
         // Double support larger range than fixed decimal, DecimalType.Maximum 
should be enough


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to