Repository: spark
Updated Branches:
  refs/heads/branch-1.6 95105b0e6 -> bea91a9e9


[SPARK-13242] [SQL] codegen fallback in case-when if there are many branches

## What changes were proposed in this pull request?

If a CaseWhen expression has many branches, the generated code can exceed the 64KB
bytecode limit for a single Java method and fail to compile. This PR changes CaseWhen
and CaseKeyWhen to fall back to interpreted mode when there are more than 20 branches.
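
For illustration only (not part of this patch), a query of the following shape produces a CaseWhen with well over 20 branches; with this change it evaluates in interpreted mode instead of failing Janino compilation. The DataFrame `df` and its string column `key` are hypothetical.

```scala
// Hypothetical sketch against the Spark 1.6 DataFrame API: `df` is assumed to
// be an existing DataFrame with a string column named "key".
import org.apache.spark.sql.functions.{col, when}

// Chain 30 `when` clauses; the resulting Column wraps a CaseWhen expression
// with far more than 20 branches, which now takes the interpreted fallback path.
val manyBranches = (1 to 30).foldLeft(when(col("key") === "0", 0)) {
  (acc, i) => acc.when(col("key") === i.toString, i)
}.otherwise(-1)

df.select(manyBranches.as("value")).show()
```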

## How was this patch tested?

Added a regression test in CodeGenerationSuite.

Author: Davies Liu <dav...@databricks.com>

Closes #11606 from davies/fix_when_16.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bea91a9e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bea91a9e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bea91a9e

Branch: refs/heads/branch-1.6
Commit: bea91a9e94341b4cab1977911e91d56016c55cb3
Parents: 95105b0
Author: Davies Liu <dav...@databricks.com>
Authored: Wed Mar 9 12:05:34 2016 -0800
Committer: Davies Liu <davies....@gmail.com>
Committed: Wed Mar 9 12:05:34 2016 -0800

----------------------------------------------------------------------
 .../expressions/conditionalExpressions.scala    | 23 ++++++++++++++++++--
 .../expressions/CodeGenerationSuite.scala       | 22 +++++++++++++++++++
 2 files changed, 43 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/bea91a9e/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
index 40b1eec..c4e5b84 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/conditionalExpressions.scala
@@ -110,6 +110,14 @@ trait CaseWhenLike extends Expression {
     // If no value is nullable and no elseValue is provided, the whole statement defaults to null.
     thenList.exists(_.nullable) || (elseValue.map(_.nullable).getOrElse(true))
   }
+
+  /**
+   * Whether it should fall back to interpreted mode instead of generating code.
+   * @return true if there are more than 20 branches.
+   */
+  protected def shouldFallback: Boolean = {
+    branches.length > 20
+  }
 }
 
 // scalastyle:off
@@ -119,7 +127,7 @@ trait CaseWhenLike extends Expression {
  * https://cwiki.apache.org/confluence/display/Hive/LanguageManual+UDF#LanguageManualUDF-ConditionalFunctions
  */
 // scalastyle:on
-case class CaseWhen(branches: Seq[Expression]) extends CaseWhenLike {
-case class CaseWhen(branches: Seq[Expression]) extends CaseWhenLike {
+case class CaseWhen(branches: Seq[Expression]) extends CaseWhenLike with CodegenFallback {
 
   // Use private[this] Array to speed up evaluation.
   @transient private[this] lazy val branchesArr = branches.toArray
@@ -157,6 +165,11 @@ case class CaseWhen(branches: Seq[Expression]) extends CaseWhenLike {
   }
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
+    if (shouldFallback) {
+      // Fall back to interpreted mode if there are too many branches, as the generated
+      // code may exceed the 64K limit on bytecode size for a single method.
+      return super[CodegenFallback].genCode(ctx, ev)
+    }
     val len = branchesArr.length
     val got = ctx.freshName("got")
 
@@ -213,7 +226,8 @@ case class CaseWhen(branches: Seq[Expression]) extends CaseWhenLike {
  * https://cwiki.apache.org/confluence/display/Hive/LanguageManual+UDF#LanguageManualUDF-ConditionalFunctions
  */
 // scalastyle:on
-case class CaseKeyWhen(key: Expression, branches: Seq[Expression]) extends CaseWhenLike {
+case class CaseKeyWhen(key: Expression, branches: Seq[Expression])
+  extends CaseWhenLike with CodegenFallback {
 
   // Use private[this] Array to speed up evaluation.
   @transient private[this] lazy val branchesArr = branches.toArray
@@ -257,6 +271,11 @@ case class CaseKeyWhen(key: Expression, branches: Seq[Expression]) extends CaseW
   }
 
   override def genCode(ctx: CodeGenContext, ev: GeneratedExpressionCode): String = {
+    if (shouldFallback) {
+      // Fall back to interpreted mode if there are too many branches, as the generated
+      // code may exceed the 64K limit on bytecode size for a single method.
+      return super[CodegenFallback].genCode(ctx, ev)
+    }
     val keyEval = key.gen(ctx)
     val len = branchesArr.length
     val got = ctx.freshName("got")

http://git-wip-us.apache.org/repos/asf/spark/blob/bea91a9e/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
index cd2ef7d..66a7e22 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CodeGenerationSuite.scala
@@ -59,6 +59,28 @@ class CodeGenerationSuite extends SparkFunSuite with ExpressionEvalHelper {
     }
   }
 
+  test("SPARK-13242: case-when expression with large number of branches (or cases)") {
+    val cases = 50
+    val clauses = 20
+
+    // Generate an individual case
+    def generateCase(n: Int): Seq[Expression] = {
+      val condition = (1 to clauses)
+        .map(c => EqualTo(BoundReference(0, StringType, false), Literal(s"$c:$n")))
+        .reduceLeft[Expression]((l, r) => Or(l, r))
+      Seq(condition, Literal(n))
+    }
+
+    val expression = CaseWhen((1 to cases).flatMap(generateCase(_)))
+
+    val plan = GenerateMutableProjection.generate(Seq(expression))()
+    val input = new GenericMutableRow(Array[Any](UTF8String.fromString(s"${clauses}:${cases}")))
+    val actual = plan(input).toSeq(Seq(expression.dataType))
+
+    assert(actual(0) == cases)
+  }
+
+
   test("test generated safe and unsafe projection") {
     val schema = new StructType(Array(
       StructField("a", StringType, true),

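As a rough usage sketch (again not part of the commit), the same shape can be reproduced from SQL on Spark 1.6; `sqlContext` and a registered temporary table `t` with a string column `key` are assumed here.

```scala
// Hypothetical SQL-level reproduction: a CASE with 30 WHEN clauses now takes
// the interpreted fallback path instead of aborting during codegen.
val whens = (1 to 30).map(i => s"WHEN key = '$i' THEN $i").mkString(" ")
sqlContext.sql(s"SELECT CASE $whens ELSE -1 END AS value FROM t").show()
```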
