Repository: spark
Updated Branches:
  refs/heads/master d403562eb -> fe767395f


[SPARK-17429][SQL] use ImplicitCastInputTypes with function Length

## What changes were proposed in this pull request?
select length(11);
select length(2.0);
These queries currently fail with an analysis error in Spark SQL, while Hive accepts them.
This PR makes the `length` function cast its input types implicitly, so the queries succeed.
The correct results are:
select length(11) returns 2
select length(2.0) returns 3
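
For illustration only (not part of this patch), a minimal sketch of the behavior this change enables; the object name, app name, and local master are assumptions:

```scala
// Minimal sketch of the behavior enabled by this change: numeric arguments
// to length() are implicitly cast to string instead of failing analysis.
// Assumes a Spark build that includes this commit; names are illustrative.
import org.apache.spark.sql.SparkSession

object LengthImplicitCastDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("length-implicit-cast-demo")
      .master("local[1]")
      .getOrCreate()

    // 11 is cast to "11" (length 2); 2.0 is cast to "2.0" (length 3).
    spark.sql("SELECT length(11)").show()
    spark.sql("SELECT length(2.0)").show()

    spark.stop()
  }
}
```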

Author: 岑玉海 <261810...@qq.com>
Author: cenyuhai <cenyu...@didichuxing.com>

Closes #15014 from cenyuhai/SPARK-17429.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/fe767395
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/fe767395
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/fe767395

Branch: refs/heads/master
Commit: fe767395ff46ee6236cf53aece85fcd61c0b49d3
Parents: d403562
Author: 岑玉海 <261810...@qq.com>
Authored: Thu Sep 15 20:45:00 2016 +0200
Committer: Herman van Hovell <hvanhov...@databricks.com>
Committed: Thu Sep 15 20:45:00 2016 +0200

----------------------------------------------------------------------
 .../sql/catalyst/expressions/stringExpressions.scala      |  2 +-
 .../scala/org/apache/spark/sql/StringFunctionsSuite.scala | 10 ++++++----
 2 files changed, 7 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/fe767395/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
index a8c23a8..1bcbb6c 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/stringExpressions.scala
@@ -1057,7 +1057,7 @@ case class Substring(str: Expression, pos: Expression, len: Expression)
 @ExpressionDescription(
   usage = "_FUNC_(str | binary) - Returns the length of str or number of bytes in binary data.",
   extended = "> SELECT _FUNC_('Spark SQL');\n 9")
-case class Length(child: Expression) extends UnaryExpression with ExpectsInputTypes {
+case class Length(child: Expression) extends UnaryExpression with ImplicitCastInputTypes {
   override def dataType: DataType = IntegerType
   override def inputTypes: Seq[AbstractDataType] = Seq(TypeCollection(StringType, BinaryType))
 

http://git-wip-us.apache.org/repos/asf/spark/blob/fe767395/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
index 1cc7746..bcc2351 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala
@@ -330,7 +330,8 @@ class StringFunctionsSuite extends QueryTest with SharedSQLContext {
   }
 
   test("string / binary length function") {
-    val df = Seq(("123", Array[Byte](1, 2, 3, 4), 123)).toDF("a", "b", "c")
+    val df = Seq(("123", Array[Byte](1, 2, 3, 4), 123, 2.0f, 3.015))
+      .toDF("a", "b", "c", "d", "e")
     checkAnswer(
       df.select(length($"a"), length($"b")),
       Row(3, 4))
@@ -339,9 +340,10 @@ class StringFunctionsSuite extends QueryTest with SharedSQLContext {
       df.selectExpr("length(a)", "length(b)"),
       Row(3, 4))
 
-    intercept[AnalysisException] {
-      df.selectExpr("length(c)") // int type of the argument is unacceptable
-    }
+    checkAnswer(
+      df.selectExpr("length(c)", "length(d)", "length(e)"),
+      Row(3, 3, 5)
+    )
   }
 
   test("initcap function") {

