[ https://issues.apache.org/jira/browse/SPARK-34796?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17304550#comment-17304550 ]

Cheng Su commented on SPARK-34796:
----------------------------------

FYI, I am working on a PR to fix this now.

> Codegen compilation error for query with LIMIT operator and without AQE
> -----------------------------------------------------------------------
>
>                 Key: SPARK-34796
>                 URL: https://issues.apache.org/jira/browse/SPARK-34796
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 3.1.0, 3.2.0, 3.1.1
>            Reporter: Cheng Su
>            Priority: Blocker
>
> Example (reproduced in unit test):
>  
> {code:java}
>   test("failed limit query") {
>     withTable("left_table", "empty_right_table", "output_table") {
>       spark.range(5).toDF("k").write.saveAsTable("left_table")
>       // Empty right side: the broadcast join side becomes an EmptyHashedRelation
>       // (visible in the generated code below).
>       spark.range(0).toDF("k").write.saveAsTable("empty_right_table")
>       // The bug only reproduces with adaptive query execution disabled.
>       withSQLConf(SQLConf.ADAPTIVE_EXECUTION_ENABLED.key -> "false") {
>         spark.sql("CREATE TABLE output_table (k INT) USING parquet")
>         spark.sql(
>           s"""
>              |INSERT INTO TABLE output_table
>              |SELECT t1.k FROM left_table t1
>              |JOIN empty_right_table t2
>              |ON t1.k = t2.k
>              |LIMIT 3
>              |""".stripMargin)
>       }
>     }
>   }
> {code}
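>
> The same failure can also be hit outside the test harness. A minimal spark-shell sketch (assumptions: a Spark 3.1.x build with a default session; {{spark.sql.adaptive.enabled}} is the config key behind {{SQLConf.ADAPTIVE_EXECUTION_ENABLED}}):
> {code:java}
> // Sketch only: same tables and query as the test above, driven from spark-shell.
> spark.conf.set("spark.sql.adaptive.enabled", "false")  // bug needs AQE off
> spark.range(5).toDF("k").write.saveAsTable("left_table")
> spark.range(0).toDF("k").write.saveAsTable("empty_right_table")
> spark.sql("CREATE TABLE output_table (k INT) USING parquet")
> spark.sql(
>   """INSERT INTO TABLE output_table
>     |SELECT t1.k FROM left_table t1
>     |JOIN empty_right_table t2
>     |ON t1.k = t2.k
>     |LIMIT 3
>     |""".stripMargin)  // fails during codegen with the error shown below
> {code}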
> Result:
> {code:java}
> 17:45:52.720 WARN org.apache.hadoop.util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
> 17:46:01.540 ERROR org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 54, Column 8: Expression "_limit_counter_1" is not an rvalue
> org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 54, Column 8: Expression "_limit_counter_1" is not an rvalue
>   at org.codehaus.janino.UnitCompiler.compileError(UnitCompiler.java:12021)
>   at org.codehaus.janino.UnitCompiler.toRvalueOrCompileException(UnitCompiler.java:7575)
>   at org.codehaus.janino.UnitCompiler.getConstantValue2(UnitCompiler.java:5766)
>   at org.codehaus.janino.UnitCompiler.access$10700(UnitCompiler.java:226)
>   at org.codehaus.janino.UnitCompiler$18$1.visitAmbiguousName(UnitCompiler.java:5717)
>   at org.codehaus.janino.Java$AmbiguousName.accept(Java.java:4429)
>   at org.codehaus.janino.UnitCompiler$18.visitLvalue(UnitCompiler.java:5714)
>   at org.codehaus.janino.Java$Lvalue.accept(Java.java:4353)
>   at org.codehaus.janino.UnitCompiler.getConstantValue(UnitCompiler.java:5710)
>   at org.codehaus.janino.UnitCompiler.compileBoolean2(UnitCompiler.java:4161)
>   at org.codehaus.janino.UnitCompiler.access$6600(UnitCompiler.java:226)
>   at org.codehaus.janino.UnitCompiler$14.visitBinaryOperation(UnitCompiler.java:4008)
>   at org.codehaus.janino.UnitCompiler$14.visitBinaryOperation(UnitCompiler.java:3986)
>   at org.codehaus.janino.Java$BinaryOperation.accept(Java.java:5077)
>   at org.codehaus.janino.UnitCompiler.compileBoolean(UnitCompiler.java:3986)
>   at org.codehaus.janino.UnitCompiler.compileBoolean2(UnitCompiler.java:4133)
>   at org.codehaus.janino.UnitCompiler.access$6600(UnitCompiler.java:226)
>   at org.codehaus.janino.UnitCompiler$14.visitBinaryOperation(UnitCompiler.java:4008)
>   at org.codehaus.janino.UnitCompiler$14.visitBinaryOperation(UnitCompiler.java:3986)
>   at org.codehaus.janino.Java$BinaryOperation.accept(Java.java:5077)
>   at org.codehaus.janino.UnitCompiler.compileBoolean(UnitCompiler.java:3986)
>   at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:1854)
>   at org.codehaus.janino.UnitCompiler.access$2200(UnitCompiler.java:226)
>   at org.codehaus.janino.UnitCompiler$6.visitWhileStatement(UnitCompiler.java:1501)
>   at org.codehaus.janino.UnitCompiler$6.visitWhileStatement(UnitCompiler.java:1490)
>   at org.codehaus.janino.Java$WhileStatement.accept(Java.java:3245)
>   at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:1490)
>   at org.codehaus.janino.UnitCompiler.compileStatements(UnitCompiler.java:1573)
>   at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:3420)
>   at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1362)
>   at org.codehaus.janino.UnitCompiler.compileDeclaredMethods(UnitCompiler.java:1335)
>   at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:807)
>   at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:975)
>   at org.codehaus.janino.UnitCompiler.access$700(UnitCompiler.java:226)
>   at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:392)
>   at org.codehaus.janino.UnitCompiler$2.visitMemberClassDeclaration(UnitCompiler.java:384)
>   at org.codehaus.janino.Java$MemberClassDeclaration.accept(Java.java:1445)
>   at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:384)
>   at org.codehaus.janino.UnitCompiler.compileDeclaredMemberTypes(UnitCompiler.java:1312)
>   at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:833)
>   at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:410)
>   at org.codehaus.janino.UnitCompiler.access$400(UnitCompiler.java:226)
>   at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:389)
>   at org.codehaus.janino.UnitCompiler$2.visitPackageMemberClassDeclaration(UnitCompiler.java:384)
>   at org.codehaus.janino.Java$PackageMemberClassDeclaration.accept(Java.java:1594)
>   at org.codehaus.janino.UnitCompiler.compile(UnitCompiler.java:384)
>   at org.codehaus.janino.UnitCompiler.compile2(UnitCompiler.java:362)
>   at org.codehaus.janino.UnitCompiler.access$000(UnitCompiler.java:226)
>   at org.codehaus.janino.UnitCompiler$1.visitCompilationUnit(UnitCompiler.java:336)
>   at org.codehaus.janino.UnitCompiler$1.visitCompilationUnit(UnitCompiler.java:333)
>   at org.codehaus.janino.Java$CompilationUnit.accept(Java.java:363)
>   at org.codehaus.janino.UnitCompiler.compileUnit(UnitCompiler.java:333)
>   at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:235)
>   at org.codehaus.janino.SimpleCompiler.compileToClassLoader(SimpleCompiler.java:464)
>   at org.codehaus.janino.ClassBodyEvaluator.compileToClass(ClassBodyEvaluator.java:314)
>   at org.codehaus.janino.ClassBodyEvaluator.cook(ClassBodyEvaluator.java:237)
>   at org.codehaus.janino.SimpleCompiler.cook(SimpleCompiler.java:205)
>   at org.codehaus.commons.compiler.Cookable.cook(Cookable.java:80)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1402)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1499)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1496)
>   at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>   at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>   at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>   at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
>   at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
>   at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
>   at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1349)
>   at org.apache.spark.sql.execution.WholeStageCodegenExec.liftedTree1$1(WholeStageCodegenExec.scala:722)
>   at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:721)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
>   at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>   at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
>   at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD$lzycompute(ShuffleExchangeExec.scala:117)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD(ShuffleExchangeExec.scala:117)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency$lzycompute(ShuffleExchangeExec.scala:150)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency(ShuffleExchangeExec.scala:148)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:165)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
>   at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>   at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
>   at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
>   at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:526)
>   at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:454)
>   at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:453)
>   at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:497)
>   at org.apache.spark.sql.execution.BaseLimitExec.inputRDDs(limit.scala:121)
>   at org.apache.spark.sql.execution.BaseLimitExec.inputRDDs$(limit.scala:120)
>   at org.apache.spark.sql.execution.GlobalLimitExec.inputRDDs(limit.scala:165)
>   at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:50)
>   at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:747)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
>   at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>   at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
>   at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
>   at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:178)
>   at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:187)
>   at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:108)
>   at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:106)
>   at org.apache.spark.sql.execution.command.DataWritingCommandExec.executeCollect(commands.scala:120)
>   at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:228)
>   at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3705)
>   at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
>   at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
>   at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
>   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:771)
>   at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
>   at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3703)
>   at org.apache.spark.sql.Dataset.<init>(Dataset.scala:228)
>   at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
>   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:771)
>   at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
>   at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:614)
>   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:771)
>   at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:609)
>   at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$904(SQLQuerySuite.scala:4047)
>   at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:54)
>   at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:38)
>   at org.apache.spark.sql.SQLQuerySuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(SQLQuerySuite.scala:52)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:246)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:244)
>   at org.apache.spark.sql.SQLQuerySuite.withSQLConf(SQLQuerySuite.scala:52)
>   at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$903(SQLQuerySuite.scala:4044)
>   at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>   at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1437)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:305)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:303)
>   at org.apache.spark.sql.SQLQuerySuite.withTable(SQLQuerySuite.scala:52)
>   at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$902(SQLQuerySuite.scala:4040)
>   at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>   at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
>   at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
>   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
>   at org.scalatest.Transformer.apply(Transformer.scala:22)
>   at org.scalatest.Transformer.apply(Transformer.scala:20)
>   at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:190)
>   at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:178)
>   at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:188)
>   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:200)
>   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:200)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:182)
>   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:61)
>   at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
>   at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
>   at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:61)
>   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:233)
>   at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
>   at scala.collection.immutable.List.foreach(List.scala:392)
>   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
>   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
>   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:233)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:232)
>   at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1563)
>   at org.scalatest.Suite.run(Suite.scala:1112)
>   at org.scalatest.Suite.run$(Suite.scala:1094)
>   at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1563)
>   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:237)
>   at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
>   at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:237)
>   at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:236)
>   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:61)
>   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
>   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
>   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
>   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:61)
>   at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
>   at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1320)
>   at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1314)
>   at scala.collection.immutable.List.foreach(List.scala:392)
>   at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1314)
>   at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:993)
>   at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:971)
>   at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1480)
>   at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:971)
>   at org.scalatest.tools.Runner$.run(Runner.scala:798)
>   at org.scalatest.tools.Runner.run(Runner.scala)
>   at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:133)
>   at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:27)
> 17:46:01.553 ERROR org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator:
> /* 001 */ public Object generate(Object[] references) {
> /* 002 */   return new GeneratedIteratorForCodegenStage2(references);
> /* 003 */ }
> /* 004 */
> /* 005 */ // codegenStageId=2
> /* 006 */ final class GeneratedIteratorForCodegenStage2 extends org.apache.spark.sql.execution.BufferedRowIterator {
> /* 007 */   private Object[] references;
> /* 008 */   private scala.collection.Iterator[] inputs;
> /* 009 */   private int columnartorow_batchIdx_0;
> /* 010 */   private org.apache.spark.sql.execution.joins.EmptyHashedRelation$ bhj_relation_0;
> /* 011 */   private org.apache.spark.sql.execution.vectorized.OnHeapColumnVector[] columnartorow_mutableStateArray_2 = new org.apache.spark.sql.execution.vectorized.OnHeapColumnVector[1];
> /* 012 */   private org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[] columnartorow_mutableStateArray_3 = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter[3];
> /* 013 */   private org.apache.spark.sql.vectorized.ColumnarBatch[] columnartorow_mutableStateArray_1 = new org.apache.spark.sql.vectorized.ColumnarBatch[1];
> /* 014 */   private scala.collection.Iterator[] columnartorow_mutableStateArray_0 = new scala.collection.Iterator[1];
> /* 015 */
> /* 016 */   public GeneratedIteratorForCodegenStage2(Object[] references) {
> /* 017 */     this.references = references;
> /* 018 */   }
> /* 019 */
> /* 020 */   public void init(int index, scala.collection.Iterator[] inputs) {
> /* 021 */     partitionIndex = index;
> /* 022 */     this.inputs = inputs;
> /* 023 */     columnartorow_mutableStateArray_0[0] = inputs[0];
> /* 024 */
> /* 025 */     columnartorow_mutableStateArray_3[0] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0);
> /* 026 */     columnartorow_mutableStateArray_3[1] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0);
> /* 027 */     columnartorow_mutableStateArray_3[2] = new org.apache.spark.sql.catalyst.expressions.codegen.UnsafeRowWriter(1, 0);
> /* 028 */
> /* 029 */     bhj_relation_0 = ((org.apache.spark.sql.execution.joins.EmptyHashedRelation$) ((org.apache.spark.broadcast.TorrentBroadcast) references[3] /* broadcast */).value()).asReadOnlyCopy();
> /* 030 */     incPeakExecutionMemory(bhj_relation_0.estimatedSize());
> /* 031 */
> /* 032 */   }
> /* 033 */
> /* 034 */   private void columnartorow_nextBatch_0() throws java.io.IOException {
> /* 035 */     if (columnartorow_mutableStateArray_0[0].hasNext()) {
> /* 036 */       columnartorow_mutableStateArray_1[0] = (org.apache.spark.sql.vectorized.ColumnarBatch)columnartorow_mutableStateArray_0[0].next();
> /* 037 */       ((org.apache.spark.sql.execution.metric.SQLMetric) references[1] /* numInputBatches */).add(1);
> /* 038 */       ((org.apache.spark.sql.execution.metric.SQLMetric) references[0] /* numOutputRows */).add(columnartorow_mutableStateArray_1[0].numRows());
> /* 039 */       columnartorow_batchIdx_0 = 0;
> /* 040 */       columnartorow_mutableStateArray_2[0] = (org.apache.spark.sql.execution.vectorized.OnHeapColumnVector) columnartorow_mutableStateArray_1[0].column(0);
> /* 041 */
> /* 042 */     }
> /* 043 */   }
> /* 044 */
> /* 045 */   private void bhj_doConsume_0(long bhj_expr_0_0) throws java.io.IOException {
> /* 046 */     // If HashedRelation is empty, hash inner join simply returns nothing.
> /* 047 */
> /* 048 */   }
> /* 049 */
> /* 050 */   protected void processNext() throws java.io.IOException {
> /* 051 */     if (columnartorow_mutableStateArray_1[0] == null) {
> /* 052 */       columnartorow_nextBatch_0();
> /* 053 */     }
> /* 054 */     while (_limit_counter_1 < 3 && columnartorow_mutableStateArray_1[0] != null) {
> /* 055 */       int columnartorow_numRows_0 = columnartorow_mutableStateArray_1[0].numRows();
> /* 056 */       int columnartorow_localEnd_0 = columnartorow_numRows_0 - columnartorow_batchIdx_0;
> /* 057 */       for (int columnartorow_localIdx_0 = 0; columnartorow_localIdx_0 < columnartorow_localEnd_0; columnartorow_localIdx_0++) {
> /* 058 */         int columnartorow_rowIdx_0 = columnartorow_batchIdx_0 + columnartorow_localIdx_0;
> /* 059 */         do {
> /* 060 */           boolean columnartorow_isNull_0 = columnartorow_mutableStateArray_2[0].isNullAt(columnartorow_rowIdx_0);
> /* 061 */           long columnartorow_value_0 = columnartorow_isNull_0 ? -1L : (columnartorow_mutableStateArray_2[0].getLong(columnartorow_rowIdx_0));
> /* 062 */
> /* 063 */           boolean filter_value_2 = !columnartorow_isNull_0;
> /* 064 */           if (!filter_value_2) continue;
> /* 065 */
> /* 066 */           ((org.apache.spark.sql.execution.metric.SQLMetric) references[2] /* numOutputRows */).add(1);
> /* 067 */
> /* 068 */           bhj_doConsume_0(columnartorow_value_0);
> /* 069 */
> /* 070 */         } while(false);
> /* 071 */         if (shouldStop()) { columnartorow_batchIdx_0 = columnartorow_rowIdx_0 + 1; return; }
> /* 072 */       }
> /* 073 */       columnartorow_batchIdx_0 = columnartorow_numRows_0;
> /* 074 */       columnartorow_mutableStateArray_1[0] = null;
> /* 075 */       columnartorow_nextBatch_0();
> /* 076 */     }
> /* 077 */   }
> /* 078 */
> /* 079 */ }
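> // (Editor's note: "_limit_counter_1" is read in processNext() at generated line 054
> // above, but no declaration or increment of it appears anywhere in this class.
> // A plausible reading of the dump is that the limit's counter was declared in a
> // different codegen stage while its not-reached check leaked into this one, which
> // is what janino rejects as "not an rvalue".)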
> 17:46:01.555 ERROR org.apache.spark.sql.execution.datasources.FileFormatWriter: Aborting job 0ca25288-ee2c-4fe9-b439-979496e6cd9f.
> java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 54, Column 8: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 54, Column 8: Expression "_limit_counter_1" is not an rvalue
>   at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
>   at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
>   at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
>   at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
>   at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
>   at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
>   at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>   at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
>   at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
>   at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
>   at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1349)
>   at org.apache.spark.sql.execution.WholeStageCodegenExec.liftedTree1$1(WholeStageCodegenExec.scala:722)
>   at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:721)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
>   at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>   at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
>   at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD$lzycompute(ShuffleExchangeExec.scala:117)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD(ShuffleExchangeExec.scala:117)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency$lzycompute(ShuffleExchangeExec.scala:150)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency(ShuffleExchangeExec.scala:148)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:165)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
>   at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>   at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
>   at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
>   at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:526)
>   at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:454)
>   at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:453)
>   at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:497)
>   at org.apache.spark.sql.execution.BaseLimitExec.inputRDDs(limit.scala:121)
>   at org.apache.spark.sql.execution.BaseLimitExec.inputRDDs$(limit.scala:120)
>   at org.apache.spark.sql.execution.GlobalLimitExec.inputRDDs(limit.scala:165)
>   at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:50)
>   at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:747)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
>   at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>   at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
>   at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
>   at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:178)
>   at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:187)
>   at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:108)
>   at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:106)
>   at org.apache.spark.sql.execution.command.DataWritingCommandExec.executeCollect(commands.scala:120)
>   at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:228)
>   at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3705)
>   at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
>   at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
>   at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
>   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:771)
>   at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
>   at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3703)
>   at org.apache.spark.sql.Dataset.<init>(Dataset.scala:228)
>   at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
>   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:771)
>   at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
>   at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:614)
>   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:771)
>   at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:609)
>   at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$904(SQLQuerySuite.scala:4047)
>   at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:54)
>   at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:38)
>   at org.apache.spark.sql.SQLQuerySuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(SQLQuerySuite.scala:52)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:246)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:244)
>   at org.apache.spark.sql.SQLQuerySuite.withSQLConf(SQLQuerySuite.scala:52)
>   at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$903(SQLQuerySuite.scala:4044)
>   at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>   at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1437)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:305)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:303)
>   at org.apache.spark.sql.SQLQuerySuite.withTable(SQLQuerySuite.scala:52)
>   at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$902(SQLQuerySuite.scala:4040)
>   at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>   at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
>   at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
>   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
>   at org.scalatest.Transformer.apply(Transformer.scala:22)
>   at org.scalatest.Transformer.apply(Transformer.scala:20)
>   at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:190)
>   at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:178)
>   at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:188)
>   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:200)
>   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:200)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:182)
>   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:61)
>   at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
>   at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
>   at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:61)
>   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:233)
>   at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
>   at scala.collection.immutable.List.foreach(List.scala:392)
>   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
>   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
>   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:233)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:232)
>   at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1563)
>   at org.scalatest.Suite.run(Suite.scala:1112)
>   at org.scalatest.Suite.run$(Suite.scala:1094)
>   at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1563)
>   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:237)
>   at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
>   at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:237)
>   at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:236)
>   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:61)
>   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
>   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
>   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
>   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:61)
>   at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
>   at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1320)
>   at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1314)
>   at scala.collection.immutable.List.foreach(List.scala:392)
>   at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1314)
>   at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:993)
>   at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:971)
>   at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1480)
>   at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:971)
>   at org.scalatest.tools.Runner$.run(Runner.scala:798)
>   at org.scalatest.tools.Runner.run(Runner.scala)
>   at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:133)
>   at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:27)
> Caused by: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 54, Column 8: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 54, Column 8: Expression "_limit_counter_1" is not an rvalue
>   at org.apache.spark.sql.errors.QueryExecutionErrors$.compilerError(QueryExecutionErrors.scala:328)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1414)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1499)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1496)
>   at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>   at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>   ... 122 more
> Job aborted.
> org.apache.spark.SparkException: Job aborted.
>   at org.apache.spark.sql.errors.QueryExecutionErrors$.jobAbortedError(QueryExecutionErrors.scala:414)
>   at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:233)
>   at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelationCommand.run(InsertIntoHadoopFsRelationCommand.scala:187)
>   at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:108)
>   at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:106)
>   at org.apache.spark.sql.execution.command.DataWritingCommandExec.executeCollect(commands.scala:120)
>   at org.apache.spark.sql.Dataset.$anonfun$logicalPlan$1(Dataset.scala:228)
>   at org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3705)
>   at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:103)
>   at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163)
>   at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90)
>   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:771)
>   at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
>   at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3703)
>   at org.apache.spark.sql.Dataset.<init>(Dataset.scala:228)
>   at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99)
>   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:771)
>   at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96)
>   at org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:614)
>   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:771)
>   at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:609)
>   at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$904(SQLQuerySuite.scala:4047)
>   at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf(SQLHelper.scala:54)
>   at org.apache.spark.sql.catalyst.plans.SQLHelper.withSQLConf$(SQLHelper.scala:38)
>   at org.apache.spark.sql.SQLQuerySuite.org$apache$spark$sql$test$SQLTestUtilsBase$$super$withSQLConf(SQLQuerySuite.scala:52)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf(SQLTestUtils.scala:246)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withSQLConf$(SQLTestUtils.scala:244)
>   at org.apache.spark.sql.SQLQuerySuite.withSQLConf(SQLQuerySuite.scala:52)
>   at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$903(SQLQuerySuite.scala:4044)
>   at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>   at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1437)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:305)
>   at org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:303)
>   at org.apache.spark.sql.SQLQuerySuite.withTable(SQLQuerySuite.scala:52)
>   at org.apache.spark.sql.SQLQuerySuite.$anonfun$new$902(SQLQuerySuite.scala:4040)
>   at scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
>   at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
>   at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
>   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
>   at org.scalatest.Transformer.apply(Transformer.scala:22)
>   at org.scalatest.Transformer.apply(Transformer.scala:20)
>   at org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:190)
>   at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:178)
>   at org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:188)
>   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:200)
>   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:200)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:182)
>   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:61)
>   at org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
>   at org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
>   at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:61)
>   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:233)
>   at org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
>   at scala.collection.immutable.List.foreach(List.scala:392)
>   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
>   at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
>   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:233)
>   at org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:232)
>   at org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1563)
>   at org.scalatest.Suite.run(Suite.scala:1112)
>   at org.scalatest.Suite.run$(Suite.scala:1094)
>   at org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1563)
>   at org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:237)
>   at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
>   at org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:237)
>   at org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:236)
>   at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:61)
>   at org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
>   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
>   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
>   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:61)
>   at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
>   at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13(Runner.scala:1320)
>   at org.scalatest.tools.Runner$.$anonfun$doRunRunRunDaDoRunRun$13$adapted(Runner.scala:1314)
>   at scala.collection.immutable.List.foreach(List.scala:392)
>   at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1314)
>   at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24(Runner.scala:993)
>   at org.scalatest.tools.Runner$.$anonfun$runOptionallyWithPassFailReporter$24$adapted(Runner.scala:971)
>   at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1480)
>   at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:971)
>   at org.scalatest.tools.Runner$.run(Runner.scala:798)
>   at org.scalatest.tools.Runner.run(Runner.scala)
>   at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:133)
>   at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:27)
> Caused by: java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 54, Column 8: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 54, Column 8: Expression "_limit_counter_1" is not an rvalue
>   at com.google.common.util.concurrent.AbstractFuture$Sync.getValue(AbstractFuture.java:306)
>   at com.google.common.util.concurrent.AbstractFuture$Sync.get(AbstractFuture.java:293)
>   at com.google.common.util.concurrent.AbstractFuture.get(AbstractFuture.java:116)
>   at com.google.common.util.concurrent.Uninterruptibles.getUninterruptibly(Uninterruptibles.java:135)
>   at com.google.common.cache.LocalCache$Segment.getAndRecordStats(LocalCache.java:2410)
>   at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2380)
>   at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2342)
>   at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2257)
>   at com.google.common.cache.LocalCache.get(LocalCache.java:4000)
>   at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:4004)
>   at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4874)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.compile(CodeGenerator.scala:1349)
>   at org.apache.spark.sql.execution.WholeStageCodegenExec.liftedTree1$1(WholeStageCodegenExec.scala:722)
>   at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:721)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
>   at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>   at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
>   at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD$lzycompute(ShuffleExchangeExec.scala:117)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.inputRDD(ShuffleExchangeExec.scala:117)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency$lzycompute(ShuffleExchangeExec.scala:150)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.shuffleDependency(ShuffleExchangeExec.scala:148)
>   at org.apache.spark.sql.execution.exchange.ShuffleExchangeExec.doExecute(ShuffleExchangeExec.scala:165)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
>   at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>   at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
>   at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
>   at org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:526)
>   at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:454)
>   at org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:453)
>   at org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:497)
>   at org.apache.spark.sql.execution.BaseLimitExec.inputRDDs(limit.scala:121)
>   at org.apache.spark.sql.execution.BaseLimitExec.inputRDDs$(limit.scala:120)
>   at org.apache.spark.sql.execution.GlobalLimitExec.inputRDDs(limit.scala:165)
>   at org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:50)
>   at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:747)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)
>   at org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)
>   at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>   at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)
>   at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)
>   at org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:178)
>   ... 84 more
> Caused by: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 54, Column 8: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 54, Column 8: Expression "_limit_counter_1" is not an rvalue
>   at org.apache.spark.sql.errors.QueryExecutionErrors$.compilerError(QueryExecutionErrors.scala:328)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$.org$apache$spark$sql$catalyst$expressions$codegen$CodeGenerator$$doCompile(CodeGenerator.scala:1414)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1499)
>   at org.apache.spark.sql.catalyst.expressions.codegen.CodeGenerator$$anon$1.load(CodeGenerator.scala:1496)
>   at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3599)
>   at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2379)
>   ... 122 more
> 17:46:01.988 WARN org.apache.spark.sql.SQLQuerySuite: 
> ===== POSSIBLE THREAD LEAK IN SUITE o.a.s.sql.SQLQuerySuite, thread names: 
> rpc-boss-3-1, shuffle-boss-6-1 =====
> Process finished with exit code 0
> {code}
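>
> For context on the error message itself: janino reports {{Expression "..." is not an rvalue}} when compiled code reads a name that was never declared. A standalone sketch of that failure mode (not Spark code; assumes janino on the classpath, which Spark's codegen drives through {{ClassBodyEvaluator}} per the stack trace above -- the hypothetical demo should fail with the same kind of CompileException):
> {code:java}
> import org.codehaus.janino.ClassBodyEvaluator
>
> // Compiling a class body that reads an undeclared field fails the same way
> // the generated processNext() does: the counter is referenced but never declared.
> object NotAnRvalueDemo {
>   def main(args: Array[String]): Unit = {
>     val evaluator = new ClassBodyEvaluator()
>     evaluator.cook(
>       """protected void processNext() {
>         |  while (_limit_counter_1 < 3) { }  // _limit_counter_1 never declared
>         |}
>         |""".stripMargin)  // throws org.codehaus.commons.compiler.CompileException
>   }
> }
> {code}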



--
This message was sent by Atlassian Jira
(v8.3.4#803005)
