Repository: spark
Updated Branches:
  refs/heads/branch-2.2 7fd6d535d -> 20846758b


[SPARK-22688][SQL] Upgrade Janino version to 3.0.8

This PR upgrades the Janino version to 3.0.8. [Janino 
3.0.8](https://janino-compiler.github.io/janino/changelog.html) includes an 
important fix to reduce the number of constant pool entries by using 'sipush' 
java bytecode.

* SIPUSH bytecode is not used for short integer constant 
[#33](https://github.com/janino-compiler/janino/issues/33).

Please see details in [this discussion 
thread](https://github.com/apache/spark/pull/19518#issuecomment-346674976).

Existing tests

Author: Kazuaki Ishizaki <ishiz...@jp.ibm.com>

Closes #19890 from kiszk/SPARK-22688.

(cherry picked from commit 8ae004b4602266d1f210e4c1564246d590412c06)
Signed-off-by: Sean Owen <so...@cloudera.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/20846758
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/20846758
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/20846758

Branch: refs/heads/branch-2.2
Commit: 20846758b7bb797d0eb4f0d80b715441f8ed4afc
Parents: 7fd6d53
Author: Kazuaki Ishizaki <ishiz...@jp.ibm.com>
Authored: Wed Dec 6 16:15:25 2017 -0800
Committer: Sean Owen <so...@cloudera.com>
Committed: Thu Dec 7 12:52:07 2017 -0600

----------------------------------------------------------------------
 dev/deps/spark-deps-hadoop-2.6                                 | 6 +++---
 dev/deps/spark-deps-hadoop-2.7                                 | 6 +++---
 pom.xml                                                        | 2 +-
 .../spark/sql/catalyst/expressions/codegen/CodeGenerator.scala | 6 +++---
 .../main/scala/org/apache/spark/sql/execution/SparkPlan.scala  | 4 ++--
 5 files changed, 12 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/20846758/dev/deps/spark-deps-hadoop-2.6
----------------------------------------------------------------------
diff --git a/dev/deps/spark-deps-hadoop-2.6 b/dev/deps/spark-deps-hadoop-2.6
index 036d811..b182fd2 100644
--- a/dev/deps/spark-deps-hadoop-2.6
+++ b/dev/deps/spark-deps-hadoop-2.6
@@ -31,7 +31,7 @@ commons-beanutils-core-1.8.0.jar
 commons-cli-1.2.jar
 commons-codec-1.10.jar
 commons-collections-3.2.2.jar
-commons-compiler-3.0.7.jar
+commons-compiler-3.0.8.jar
 commons-compress-1.4.1.jar
 commons-configuration-1.6.jar
 commons-crypto-1.0.0.jar
@@ -90,8 +90,8 @@ jackson-mapper-asl-1.9.13.jar
 jackson-module-paranamer-2.6.5.jar
 jackson-module-scala_2.11-2.6.5.jar
 jackson-xc-1.9.13.jar
-janino-3.0.7.jar
-java-xmlbuilder-1.0.jar
+janino-3.0.8.jar
+java-xmlbuilder-1.1.jar
 javassist-3.18.1-GA.jar
 javax.annotation-api-1.2.jar
 javax.inject-1.jar

http://git-wip-us.apache.org/repos/asf/spark/blob/20846758/dev/deps/spark-deps-hadoop-2.7
----------------------------------------------------------------------
diff --git a/dev/deps/spark-deps-hadoop-2.7 b/dev/deps/spark-deps-hadoop-2.7
index 3c75273..542c79e 100644
--- a/dev/deps/spark-deps-hadoop-2.7
+++ b/dev/deps/spark-deps-hadoop-2.7
@@ -31,7 +31,7 @@ commons-beanutils-core-1.8.0.jar
 commons-cli-1.2.jar
 commons-codec-1.10.jar
 commons-collections-3.2.2.jar
-commons-compiler-3.0.7.jar
+commons-compiler-3.0.8.jar
 commons-compress-1.4.1.jar
 commons-configuration-1.6.jar
 commons-crypto-1.0.0.jar
@@ -90,8 +90,8 @@ jackson-mapper-asl-1.9.13.jar
 jackson-module-paranamer-2.6.5.jar
 jackson-module-scala_2.11-2.6.5.jar
 jackson-xc-1.9.13.jar
-janino-3.0.7.jar
-java-xmlbuilder-1.0.jar
+janino-3.0.8.jar
+java-xmlbuilder-1.1.jar
 javassist-3.18.1-GA.jar
 javax.annotation-api-1.2.jar
 javax.inject-1.jar

http://git-wip-us.apache.org/repos/asf/spark/blob/20846758/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 3908aab..c2851a9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -170,7 +170,7 @@
     <!-- org.apache.commons/commons-lang3/-->
     <commons-lang3.version>3.5</commons-lang3.version>
     <datanucleus-core.version>3.2.10</datanucleus-core.version>
-    <janino.version>3.0.7</janino.version>
+    <janino.version>3.0.8</janino.version>
     <jersey.version>2.22.2</jersey.version>
     <joda.version>2.9.3</joda.version>
     <jodd.version>3.5.2</jodd.version>

http://git-wip-us.apache.org/repos/asf/spark/blob/20846758/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
index b61ad42..3964471 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/codegen/CodeGenerator.scala
@@ -30,7 +30,7 @@ import com.google.common.cache.{CacheBuilder, CacheLoader}
 import com.google.common.util.concurrent.{ExecutionError, 
UncheckedExecutionException}
 import org.apache.commons.lang3.exception.ExceptionUtils
 import org.codehaus.commons.compiler.CompileException
-import org.codehaus.janino.{ByteArrayClassLoader, ClassBodyEvaluator, 
JaninoRuntimeException, SimpleCompiler}
+import org.codehaus.janino.{ByteArrayClassLoader, ClassBodyEvaluator, 
InternalCompilerException, SimpleCompiler}
 import org.codehaus.janino.util.ClassFile
 
 import org.apache.spark.{SparkEnv, TaskContext, TaskKilledException}
@@ -1000,10 +1000,10 @@ object CodeGenerator extends Logging {
       evaluator.cook("generated.java", code.body)
       recordCompilationStats(evaluator)
     } catch {
-      case e: JaninoRuntimeException =>
+      case e: InternalCompilerException =>
         val msg = s"failed to compile: $e\n$formatted"
         logError(msg, e)
-        throw new JaninoRuntimeException(msg, e)
+        throw new InternalCompilerException(msg, e)
       case e: CompileException =>
         val msg = s"failed to compile: $e\n$formatted"
         logError(msg, e)

http://git-wip-us.apache.org/repos/asf/spark/blob/20846758/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
index c4ed966..4c17a24 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
@@ -23,7 +23,7 @@ import scala.collection.mutable.ArrayBuffer
 import scala.concurrent.ExecutionContext
 
 import org.codehaus.commons.compiler.CompileException
-import org.codehaus.janino.JaninoRuntimeException
+import org.codehaus.janino.InternalCompilerException
 
 import org.apache.spark.{broadcast, SparkEnv}
 import org.apache.spark.internal.Logging
@@ -373,7 +373,7 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with 
Logging with Serializ
     try {
       GeneratePredicate.generate(expression, inputSchema)
     } catch {
-      case e @ (_: JaninoRuntimeException | _: CompileException)
+      case e @ (_: InternalCompilerException | _: CompileException)
           if sqlContext == null || sqlContext.conf.wholeStageFallback =>
         genInterpretedPredicate(expression, inputSchema)
     }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to