wForget commented on issue #2424:
URL: https://github.com/apache/datafusion-comet/issues/2424#issuecomment-3317018488
Exception stack:
```
25/09/22 13:12:21 INFO CometScanExec: Planning scan with bin packing, max size: 134217728 bytes, open cost is considered as scanning 4194304 bytes.
Traceback (most recent call last):
  File "/Users/wforget/work/git/datafusion-comet/dev/benchmarks/tpcbench.py", line 120, in <module>
    main(args.benchmark, args.data, args.queries, int(args.iterations), args.output, args.name)
    ~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/wforget/work/git/datafusion-comet/dev/benchmarks/tpcbench.py", line 82, in main
    df.explain()
    ~~~~~~~~~~^^
  File "/Users/wforget/work/spark/spark-4.0.1-bin-hadoop3/python/lib/pyspark.zip/pyspark/sql/classic/dataframe.py", line 269, in explain
    print(self._sc._jvm.PythonSQLUtils.explainString(self._jdf.queryExecution(), explain_mode))
          ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/wforget/work/spark/spark-4.0.1-bin-hadoop3/python/lib/py4j-0.10.9.9-src.zip/py4j/java_gateway.py", line 1362, in __call__
    return_value = get_return_value(
        answer, self.gateway_client, self.target_id, self.name)
  File "/Users/wforget/work/spark/spark-4.0.1-bin-hadoop3/python/lib/pyspark.zip/pyspark/errors/exceptions/captured.py", line 288, in deco
    raise converted from None
pyspark.errors.exceptions.captured.IllegalArgumentException: requirement failed: Subquery subquery#62, [id=#95] has not finished
JVM stacktrace:
java.lang.IllegalArgumentException: requirement failed: Subquery subquery#62, [id=#95] has not finished
    at scala.Predef$.require(Predef.scala:337)
    at org.apache.spark.sql.execution.ScalarSubquery.toLiteral(subquery.scala:108)
    at org.apache.spark.sql.comet.shims.ShimCometScanExec.$anonfun$getPushedDownFilters$1(ShimCometScanExec.scala:70)
    at org.apache.spark.sql.comet.shims.ShimCometScanExec$$anonfun$$nestedInanonfun$translateToV1Filters$1$1.applyOrElse(ShimCometScanExec.scala:80)
    at org.apache.spark.sql.comet.shims.ShimCometScanExec$$anonfun$$nestedInanonfun$translateToV1Filters$1$1.applyOrElse(ShimCometScanExec.scala:77)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:470)
    at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:86)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:470)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$3(TreeNode.scala:475)
    at org.apache.spark.sql.catalyst.trees.BinaryLike.mapChildren(TreeNode.scala:1259)
    at org.apache.spark.sql.catalyst.trees.BinaryLike.mapChildren$(TreeNode.scala:1256)
    at org.apache.spark.sql.catalyst.expressions.BinaryExpression.mapChildren(Expression.scala:683)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:475)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:446)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:414)
    at org.apache.spark.sql.comet.shims.ShimCometScanExec.$anonfun$translateToV1Filters$1(ShimCometScanExec.scala:77)
    at scala.collection.immutable.List.map(List.scala:251)
    at scala.collection.immutable.List.map(List.scala:79)
    at org.apache.spark.sql.comet.shims.ShimCometScanExec.translateToV1Filters(ShimCometScanExec.scala:77)
    at org.apache.spark.sql.comet.shims.ShimCometScanExec.getPushedDownFilters(ShimCometScanExec.scala:70)
    at org.apache.spark.sql.comet.shims.ShimCometScanExec.getPushedDownFilters$(ShimCometScanExec.scala:69)
    at org.apache.spark.sql.comet.CometScanExec.getPushedDownFilters(CometScanExec.scala:61)
    at org.apache.spark.sql.comet.CometScanExec.pushedDownFilters$lzycompute(CometScanExec.scala:159)
    at org.apache.spark.sql.comet.CometScanExec.pushedDownFilters(CometScanExec.scala:159)
    at org.apache.spark.sql.comet.CometScanExec.inputRDD$lzycompute(CometScanExec.scala:200)
    at org.apache.spark.sql.comet.CometScanExec.inputRDD(CometScanExec.scala:191)
    at org.apache.comet.serde.QueryPlanSerde$.operator2Proto(QueryPlanSerde.scala:1102)
    at org.apache.comet.rules.CometExecRule.org$apache$comet$rules$CometExecRule$$convertNode$1(CometExecRule.scala:160)
    at org.apache.comet.rules.CometExecRule$$anonfun$transform$43.applyOrElse(CometExecRule.scala:559)
    at org.apache.comet.rules.CometExecRule$$anonfun$transform$43.applyOrElse(CometExecRule.scala:558)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$2(TreeNode.scala:524)
    at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:86)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:524)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
    at org.apache.spark.sql.execution.FilterExec.mapChildren(basicPhysicalOperators.scala:220)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
    at org.apache.spark.sql.execution.exchange.Exchange.mapChildren(Exchange.scala:36)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
    at org.apache.spark.sql.execution.SortExec.mapChildren(SortExec.scala:40)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.BinaryLike.mapChildren(TreeNode.scala:1257)
    at org.apache.spark.sql.catalyst.trees.BinaryLike.mapChildren$(TreeNode.scala:1256)
    at org.apache.spark.sql.execution.joins.SortMergeJoinExec.mapChildren(SortMergeJoinExec.scala:39)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
    at org.apache.spark.sql.execution.ProjectExec.mapChildren(basicPhysicalOperators.scala:42)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
    at org.apache.spark.sql.execution.aggregate.HashAggregateExec.mapChildren(HashAggregateExec.scala:52)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
    at org.apache.spark.sql.execution.exchange.Exchange.mapChildren(Exchange.scala:36)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
    at org.apache.spark.sql.execution.aggregate.HashAggregateExec.mapChildren(HashAggregateExec.scala:52)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
    at org.apache.spark.sql.execution.exchange.Exchange.mapChildren(Exchange.scala:36)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
    at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
    at org.apache.spark.sql.execution.SortExec.mapChildren(SortExec.scala:40)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
    at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:497)
    at org.apache.comet.rules.CometExecRule.transform(CometExecRule.scala:558)
    at org.apache.comet.rules.CometExecRule._apply(CometExecRule.scala:643)
    at org.apache.comet.rules.CometExecRule.apply(CometExecRule.scala:616)
    at org.apache.comet.rules.CometExecRule.apply(CometExecRule.scala:52)
    at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec$.$anonfun$applyPhysicalRules$2(AdaptiveSparkPlanExec.scala:936)
    at scala.collection.LinearSeqOps.foldLeft(LinearSeq.scala:183)
    at scala.collection.LinearSeqOps.foldLeft$(LinearSeq.scala:179)
    at scala.collection.immutable.List.foldLeft(List.scala:79)
    at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec$.applyPhysicalRules(AdaptiveSparkPlanExec.scala:935)
    at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.$anonfun$initialPlan$1(AdaptiveSparkPlanExec.scala:213)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:804)
    at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.<init>(AdaptiveSparkPlanExec.scala:210)
    at org.apache.spark.sql.execution.adaptive.InsertAdaptiveSparkPlan.applyInternal(InsertAdaptiveSparkPlan.scala:71)
    at org.apache.spark.sql.execution.adaptive.InsertAdaptiveSparkPlan.apply(InsertAdaptiveSparkPlan.scala:49)
    at org.apache.spark.sql.execution.adaptive.InsertAdaptiveSparkPlan.apply(InsertAdaptiveSparkPlan.scala:44)
    at org.apache.spark.sql.execution.QueryExecution$.$anonfun$prepareForExecution$1(QueryExecution.scala:574)
    at scala.collection.LinearSeqOps.foldLeft(LinearSeq.scala:183)
    at scala.collection.LinearSeqOps.foldLeft$(LinearSeq.scala:179)
    at scala.collection.immutable.List.foldLeft(List.scala:79)
    at org.apache.spark.sql.execution.QueryExecution$.prepareForExecution(QueryExecution.scala:573)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$lazyExecutedPlan$2(QueryExecution.scala:238)
    at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:148)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:278)
    at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:654)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:278)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:804)
    at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:277)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$lazyExecutedPlan$1(QueryExecution.scala:238)
    at scala.util.Try$.apply(Try.scala:217)
    at org.apache.spark.util.Utils$.doTryWithCallerStacktrace(Utils.scala:1378)
    at org.apache.spark.util.Utils$.getTryWithCallerStacktrace(Utils.scala:1439)
    at org.apache.spark.util.LazyTry.get(LazyTry.scala:58)
    at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:248)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$simpleString$2(QueryExecution.scala:303)
    at org.apache.spark.sql.catalyst.plans.QueryPlan$.append(QueryPlan.scala:772)
    at org.apache.spark.sql.execution.QueryExecution.simpleString(QueryExecution.scala:303)
    at org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:332)
    at org.apache.spark.sql.execution.QueryExecution.explainString(QueryExecution.scala:312)
    at org.apache.spark.sql.api.python.PythonSQLUtils$.explainString(PythonSQLUtils.scala:115)
    at org.apache.spark.sql.api.python.PythonSQLUtils.explainString(PythonSQLUtils.scala)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:77)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:569)
    at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
    at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:374)
    at py4j.Gateway.invoke(Gateway.java:282)
    at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
    at py4j.commands.CallCommand.execute(CallCommand.java:79)
    at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:184)
    at py4j.ClientServerConnection.run(ClientServerConnection.java:108)
    at java.base/java.lang.Thread.run(Thread.java:840)
    Suppressed: org.apache.spark.util.Utils$OriginalTryStackTraceException: Full stacktrace of original doTryWithCallerStacktrace caller
        at scala.Predef$.require(Predef.scala:337)
        at org.apache.spark.sql.execution.ScalarSubquery.toLiteral(subquery.scala:108)
        at org.apache.spark.sql.comet.shims.ShimCometScanExec.$anonfun$getPushedDownFilters$1(ShimCometScanExec.scala:70)
        at org.apache.spark.sql.comet.shims.ShimCometScanExec$$anonfun$$nestedInanonfun$translateToV1Filters$1$1.applyOrElse(ShimCometScanExec.scala:80)
        at org.apache.spark.sql.comet.shims.ShimCometScanExec$$anonfun$$nestedInanonfun$translateToV1Filters$1$1.applyOrElse(ShimCometScanExec.scala:77)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:470)
        at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:86)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:470)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$3(TreeNode.scala:475)
        at org.apache.spark.sql.catalyst.trees.BinaryLike.mapChildren(TreeNode.scala:1259)
        at org.apache.spark.sql.catalyst.trees.BinaryLike.mapChildren$(TreeNode.scala:1256)
        at org.apache.spark.sql.catalyst.expressions.BinaryExpression.mapChildren(Expression.scala:683)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:475)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:446)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:414)
        at org.apache.spark.sql.comet.shims.ShimCometScanExec.$anonfun$translateToV1Filters$1(ShimCometScanExec.scala:77)
        at scala.collection.immutable.List.map(List.scala:251)
        at scala.collection.immutable.List.map(List.scala:79)
        at org.apache.spark.sql.comet.shims.ShimCometScanExec.translateToV1Filters(ShimCometScanExec.scala:77)
        at org.apache.spark.sql.comet.shims.ShimCometScanExec.getPushedDownFilters(ShimCometScanExec.scala:70)
        at org.apache.spark.sql.comet.shims.ShimCometScanExec.getPushedDownFilters$(ShimCometScanExec.scala:69)
        at org.apache.spark.sql.comet.CometScanExec.getPushedDownFilters(CometScanExec.scala:61)
        at org.apache.spark.sql.comet.CometScanExec.pushedDownFilters$lzycompute(CometScanExec.scala:159)
        at org.apache.spark.sql.comet.CometScanExec.pushedDownFilters(CometScanExec.scala:159)
        at org.apache.spark.sql.comet.CometScanExec.inputRDD$lzycompute(CometScanExec.scala:200)
        at org.apache.spark.sql.comet.CometScanExec.inputRDD(CometScanExec.scala:191)
        at org.apache.comet.serde.QueryPlanSerde$.operator2Proto(QueryPlanSerde.scala:1102)
        at org.apache.comet.rules.CometExecRule.org$apache$comet$rules$CometExecRule$$convertNode$1(CometExecRule.scala:160)
        at org.apache.comet.rules.CometExecRule$$anonfun$transform$43.applyOrElse(CometExecRule.scala:559)
        at org.apache.comet.rules.CometExecRule$$anonfun$transform$43.applyOrElse(CometExecRule.scala:558)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$2(TreeNode.scala:524)
        at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(origin.scala:86)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:524)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
        at org.apache.spark.sql.execution.FilterExec.mapChildren(basicPhysicalOperators.scala:220)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
        at org.apache.spark.sql.execution.exchange.Exchange.mapChildren(Exchange.scala:36)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
        at org.apache.spark.sql.execution.SortExec.mapChildren(SortExec.scala:40)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.BinaryLike.mapChildren(TreeNode.scala:1257)
        at org.apache.spark.sql.catalyst.trees.BinaryLike.mapChildren$(TreeNode.scala:1256)
        at org.apache.spark.sql.execution.joins.SortMergeJoinExec.mapChildren(SortMergeJoinExec.scala:39)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
        at org.apache.spark.sql.execution.ProjectExec.mapChildren(basicPhysicalOperators.scala:42)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
        at org.apache.spark.sql.execution.aggregate.HashAggregateExec.mapChildren(HashAggregateExec.scala:52)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
        at org.apache.spark.sql.execution.exchange.Exchange.mapChildren(Exchange.scala:36)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
        at org.apache.spark.sql.execution.aggregate.HashAggregateExec.mapChildren(HashAggregateExec.scala:52)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
        at org.apache.spark.sql.execution.exchange.Exchange.mapChildren(Exchange.scala:36)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformUpWithPruning$1(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1231)
        at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1230)
        at org.apache.spark.sql.execution.SortExec.mapChildren(SortExec.scala:40)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUpWithPruning(TreeNode.scala:521)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:497)
        at org.apache.comet.rules.CometExecRule.transform(CometExecRule.scala:558)
        at org.apache.comet.rules.CometExecRule._apply(CometExecRule.scala:643)
        at org.apache.comet.rules.CometExecRule.apply(CometExecRule.scala:616)
        at org.apache.comet.rules.CometExecRule.apply(CometExecRule.scala:52)
        at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec$.$anonfun$applyPhysicalRules$2(AdaptiveSparkPlanExec.scala:936)
        at scala.collection.LinearSeqOps.foldLeft(LinearSeq.scala:183)
        at scala.collection.LinearSeqOps.foldLeft$(LinearSeq.scala:179)
        at scala.collection.immutable.List.foldLeft(List.scala:79)
        at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec$.applyPhysicalRules(AdaptiveSparkPlanExec.scala:935)
        at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.$anonfun$initialPlan$1(AdaptiveSparkPlanExec.scala:213)
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:804)
        at org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanExec.<init>(AdaptiveSparkPlanExec.scala:210)
        at org.apache.spark.sql.execution.adaptive.InsertAdaptiveSparkPlan.applyInternal(InsertAdaptiveSparkPlan.scala:71)
        at org.apache.spark.sql.execution.adaptive.InsertAdaptiveSparkPlan.apply(InsertAdaptiveSparkPlan.scala:49)
        at org.apache.spark.sql.execution.adaptive.InsertAdaptiveSparkPlan.apply(InsertAdaptiveSparkPlan.scala:44)
        at org.apache.spark.sql.execution.QueryExecution$.$anonfun$prepareForExecution$1(QueryExecution.scala:574)
        at scala.collection.LinearSeqOps.foldLeft(LinearSeq.scala:183)
        at scala.collection.LinearSeqOps.foldLeft$(LinearSeq.scala:179)
        at scala.collection.immutable.List.foldLeft(List.scala:79)
        at org.apache.spark.sql.execution.QueryExecution$.prepareForExecution(QueryExecution.scala:573)
        at org.apache.spark.sql.execution.QueryExecution.$anonfun$lazyExecutedPlan$2(QueryExecution.scala:238)
        at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:148)
        at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:278)
        at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:654)
        at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:278)
        at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:804)
        at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:277)
        at org.apache.spark.sql.execution.QueryExecution.$anonfun$lazyExecutedPlan$1(QueryExecution.scala:238)
        at scala.util.Try$.apply(Try.scala:217)
        at org.apache.spark.util.Utils$.doTryWithCallerStacktrace(Utils.scala:1378)
        at org.apache.spark.util.LazyTry.tryT$lzycompute(LazyTry.scala:46)
        at org.apache.spark.util.LazyTry.tryT(LazyTry.scala:46)
        ... 21 more
```
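The stack shows the failure happening while Comet plans the scan during `df.explain()`: `ShimCometScanExec.getPushedDownFilters` calls `ScalarSubquery.toLiteral` on a subquery that has not executed yet, so the `require` at `subquery.scala:108` fails. A minimal sketch of the query shape that can reach this path is below; it is a hypothetical reproduction, not taken from the benchmark, and the table name, Parquet path, and Comet plugin/config settings are assumptions to adapt to your environment.

```python
# Hypothetical reproduction sketch (assumed setup, not from the original report):
# a Parquet scan whose pushed-down filter references a scalar subquery, then
# explain(), which builds the physical plan without ever running the subquery.
from pyspark.sql import SparkSession

spark = (
    SparkSession.builder
    .appName("comet-scalar-subquery-explain")
    # Comet settings are assumptions; adjust jars/configs to your Comet build.
    .config("spark.plugins", "org.apache.spark.CometPlugin")
    .config("spark.comet.enabled", "true")
    .getOrCreate()
)

# A small Parquet table so the filter below is a pushdown candidate.
spark.range(1_000).write.mode("overwrite").parquet("/tmp/comet_repro_t")
spark.read.parquet("/tmp/comet_repro_t").createOrReplaceTempView("t")

# The scalar subquery in the WHERE clause is unfinished at planning time;
# explain() only prepares the plan, so the subquery result is not available
# when Comet translates the scan's pushed-down filters.
df = spark.sql("SELECT * FROM t WHERE id > (SELECT avg(id) FROM t)")
df.explain()
```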