[ https://issues.apache.org/jira/browse/SPARK-33296?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

angerszhu closed SPARK-33296.
-----------------------------

Not a problem now

> Format exception when using cube func && with cube
> --------------------------------------------------
>
>                 Key: SPARK-33296
>                 URL: https://issues.apache.org/jira/browse/SPARK-33296
>             Project: Spark
>          Issue Type: Sub-task
>          Components: SQL
>    Affects Versions: 3.1.0
>            Reporter: angerszhu
>            Priority: Major
>
> spark-sql> explain extended select a, b, c from x group by cube(a, b, c) with cube;
> 20/10/30 11:16:50 ERROR SparkSQLDriver: Failed in [explain extended select a, b, c from x group by cube(a, b, c) with cube]
> java.lang.UnsupportedOperationException
>   at org.apache.spark.sql.catalyst.expressions.GroupingSet.dataType(grouping.scala:36)
>   at org.apache.spark.sql.catalyst.expressions.GroupingSet.dataType$(grouping.scala:36)
>   at org.apache.spark.sql.catalyst.expressions.Cube.dataType(grouping.scala:61)
>   at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.checkValidGroupingExprs$1(CheckAnalysis.scala:269)
>   at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.$anonfun$checkAnalysis$12(CheckAnalysis.scala:285)
>   at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.$anonfun$checkAnalysis$12$adapted(CheckAnalysis.scala:285)
>   at scala.collection.immutable.List.foreach(List.scala:392)
>   at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.$anonfun$checkAnalysis$1(CheckAnalysis.scala:285)
>   at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.$anonfun$checkAnalysis$1$adapted(CheckAnalysis.scala:92)
>   at org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:184)
>   at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.checkAnalysis(CheckAnalysis.scala:92)
>   at org.apache.spark.sql.catalyst.analysis.CheckAnalysis.checkAnalysis$(CheckAnalysis.scala:89)
>   at org.apache.spark.sql.catalyst.analysis.Analyzer.checkAnalysis(Analyzer.scala:132)
>   at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$executeAndCheck$1(Analyzer.scala:162)
>   at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:214)
>   at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:159)
>   at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:73)
>   at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111)
>   at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:138)
>   at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:769)
>   at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:138)
>   at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:73)
>   at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:71)
>   at org.apache.spark.sql.execution.QueryExecution.writePlans(QueryExecution.scala:213)
>   at org.apache.spark.sql.execution.QueryExecution.toString(QueryExecution.scala:235)
>   at org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:191)
>   at org.apache.spark.sql.execution.QueryExecution.explainString(QueryExecution.scala:170)
>   at org.apache.spark.sql.execution.command.ExplainCommand.run(commands.scala:158)
>   at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:70)
>   at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:68)
>   at org.apache.spark.sql.execution.command.ExecutedCommandExec.executeCollect(commands.scala:79)
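
The trace shows the exception is thrown during analysis, in GroupingSet.dataType (grouping.scala:36), when the cube(a, b, c) grouping expression is combined with the WITH CUBE modifier in the same GROUP BY. A minimal reproduction sketch, assuming a simple table x(a, b, c); the table definition below is hypothetical, since the report does not include one:

    -- hypothetical table just for reproduction; the report does not define x
    CREATE TABLE x (a INT, b INT, c INT) USING parquet;

    -- fails as reported: cube(...) combined with WITH CUBE
    EXPLAIN EXTENDED SELECT a, b, c FROM x GROUP BY cube(a, b, c) WITH CUBE;

    -- either spelling on its own expresses the same cube aggregation and analyzes fine
    EXPLAIN EXTENDED SELECT a, b, c FROM x GROUP BY cube(a, b, c);
    EXPLAIN EXTENDED SELECT a, b, c FROM x GROUP BY a, b, c WITH CUBE;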



