Frederik Schreiber created SPARK-30711:
------------------------------------------

             Summary: 64KB JVM bytecode limit - janino.InternalCompilerException
                 Key: SPARK-30711
                 URL: https://issues.apache.org/jira/browse/SPARK-30711
             Project: Spark
          Issue Type: Bug
          Components: SQL
    Affects Versions: 2.4.4
         Environment: Windows 10

Spark 2.4.4

scalaVersion 2.11.12
            Reporter: Frederik Schreiber


{code:java}
ERROR CodeGenerator: failed to compile: org.codehaus.janino.InternalCompilerException: Compiling "GeneratedClass": Code of method "processNext()V" of class "org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage4" grows beyond 64 KB
org.codehaus.janino.InternalCompilerException: Compiling "GeneratedClass": Code of method "processNext()V" of class "org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage4" grows beyond 64 KB
  at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:382)
  at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:237)
  at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:465)
  at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:313)
  at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:235)
  at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:207)
  at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:80)
  at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1290)
  at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1372)
  at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1369)
  at org.spark_project.guava.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
  at org.spark_project.guava.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
  at org.spark_project.guava.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
  at org.spark_project.guava.cache.LocalCache$Segment.get(LocalCache.java:2257)
  at org.spark_project.guava.cache.LocalCache.get(LocalCache.java:4000)
  at org.spark_project.guava.cache.LocalCache.getOrLoad(LocalCache.java:4004)
  at org.spark_project.guava.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
  at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1238)
  at org.apache.spark.sql.execution.WholeStageCodegenExec.liftedTree1$1(WholeStageCodegenExec.scala:584)
  at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:583)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
  at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
  at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
  at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
  at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:247)
  at org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:296)
  at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collectFromPlan(Dataset.scala:3384)
  at org.apache.spark.sql.Dataset$$anonfun$collect$1.apply(Dataset.scala:2783)
  at org.apache.spark.sql.Dataset$$anonfun$collect$1.apply(Dataset.scala:2783)
  at org.apache.spark.sql.Dataset$$anonfun$53.apply(Dataset.scala:3365)
  at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
  at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
  at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3364)
  at org.apache.spark.sql.Dataset.collect(Dataset.scala:2783)
  at de.sparkbug.janino.SparkJaninoBug$$anonfun$1.apply(SparkJaninoBug.scala:105)
  at de.sparkbug.janino.SparkJaninoBug$$anonfun$1.apply(SparkJaninoBug.scala:12)
  at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
  at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
  at org.scalatest.Transformer.apply(Transformer.scala:22)
  at org.scalatest.Transformer.apply(Transformer.scala:20)
  at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:186)
  at org.scalatest.TestSuite$class.withFixture(TestSuite.scala:196)
  at org.scalatest.FunSuite.withFixture(FunSuite.scala:1560)
  at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:183)
  at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
  at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:196)
  at org.scalatest.SuperEngine.runTestImpl(Engine.scala:286)
  at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:196)
  at org.scalatest.FunSuite.runTest(FunSuite.scala:1560)
  at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
  at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:229)
  at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:393)
  at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:381)
  at scala.collection.immutable.List.foreach(List.scala:392)
  at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:381)
  at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:376)
  at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:458)
  at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:229)
  at org.scalatest.FunSuite.runTests(FunSuite.scala:1560)
  at org.scalatest.Suite$class.run(Suite.scala:1124)
  at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1560)
  at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
  at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:233)
  at org.scalatest.SuperEngine.runImpl(Engine.scala:518)
  at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:233)
  at org.scalatest.FunSuite.run(FunSuite.scala:1560)
  at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
  at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1349)
  at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1343)
  at scala.collection.immutable.List.foreach(List.scala:392)
  at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1343)
  at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1012)
  at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011)
  at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1509)
  at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1011)
  at org.scalatest.tools.Runner$.run(Runner.scala:850)
  at org.scalatest.tools.Runner.run(Runner.scala)
  at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:133)
  at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:27)
Caused by: org.codehaus.janino.InternalCompilerException: Code of method "processNext()V" of class "org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage4" grows beyond 64 KB
  at org.codehaus.janino.CodeContext.makeSpace(CodeContext.java:1009)
{code}
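The test that triggers this (SparkJaninoBug.scala, frames above) is not attached, so the following is only a hedged sketch of the kind of query that can push the generated processNext() method of a single whole-stage codegen stage past the JVM's 64 KB per-method bytecode limit. The object name, column names, and the number of chained CASE WHEN columns are illustrative assumptions, not the reporter's actual code; whether a given plan hits the limit depends on the plan shape and on how Spark splits the generated expression code.

{code:scala}
// Hypothetical sketch only -- names and sizes are assumptions, not the reporter's test.
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.when

object JaninoLimitSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("janino-64kb-sketch")
      .getOrCreate()
    import spark.implicits._

    val base = spark.range(0, 1000).toDF("id")

    // Pile many derived CASE WHEN columns into one projection so a single
    // whole-stage codegen stage emits a very large processNext() body.
    val wide = (1 to 400).foldLeft(base) { (df, i) =>
      df.withColumn(s"c$i", when($"id" % (i + 1) === 0, $"id" * i).otherwise($"id" + i))
    }

    // An action forces compilation of the generated stage; this is where the
    // "grows beyond 64 KB" error can be logged.
    wide.collect()

    spark.stop()
  }
}
{code}

Note that in Spark 2.4, when compilation of a codegen stage fails and spark.sql.codegen.fallback is left at its default (true), Spark logs the error above and falls back to non-codegen execution, so the query may still complete despite the ERROR in the log.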


