[ https://issues.apache.org/jira/browse/SPARK-35935?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17371853#comment-17371853 ]

Apache Spark commented on SPARK-35935:
--------------------------------------

User 'MaxGekk' has created a pull request for this issue:
https://github.com/apache/spark/pull/33152

> REPAIR TABLE fails on table refreshing
> --------------------------------------
>
>                 Key: SPARK-35935
>                 URL: https://issues.apache.org/jira/browse/SPARK-35935
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 3.2.0
>            Reporter: Max Gekk
>            Assignee: Max Gekk
>            Priority: Major
>             Fix For: 3.2.0
>
>
> MSCK REPAIR TABLE can fail during table recovery with the following exception:
> {code:java}
> Error in SQL statement: AnalysisException: Incompatible format detected.
> ...
>       at org.apache.spark.sql.execution.datasources.FindDataSourceTable.org$apache$spark$sql$execution$datasources$FindDataSourceTable$$verifyNonDeltaTable(DataSourceStrategy.scala:297)
>       at org.apache.spark.sql.execution.datasources.FindDataSourceTable$$anonfun$apply0$1.applyOrElse(DataSourceStrategy.scala:378)
>       at org.apache.spark.sql.execution.datasources.FindDataSourceTable$$anonfun$apply0$1.applyOrElse(DataSourceStrategy.scala:342)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsDownWithPruning$2(AnalysisHelper.scala:170)
>       at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:86)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsDownWithPruning$1(AnalysisHelper.scala:170)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:316)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsDownWithPruning(AnalysisHelper.scala:168)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsDownWithPruning$(AnalysisHelper.scala:164)
>       at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsDownWithPruning(LogicalPlan.scala:30)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsDownWithPruning$4(AnalysisHelper.scala:175)
>       at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren(TreeNode.scala:1093)
>       at org.apache.spark.sql.catalyst.trees.UnaryLike.mapChildren$(TreeNode.scala:1092)
>       at org.apache.spark.sql.catalyst.plans.logical.UnaryNode.mapChildren(LogicalPlan.scala:187)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.$anonfun$resolveOperatorsDownWithPruning$1(AnalysisHelper.scala:175)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.allowInvokingTransformsInAnalyzer(AnalysisHelper.scala:316)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsDownWithPruning(AnalysisHelper.scala:168)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsDownWithPruning$(AnalysisHelper.scala:164)
>       at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsDownWithPruning(LogicalPlan.scala:30)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsWithPruning(AnalysisHelper.scala:98)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperatorsWithPruning$(AnalysisHelper.scala:95)
>       at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperatorsWithPruning(LogicalPlan.scala:30)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperators(AnalysisHelper.scala:75)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.resolveOperators$(AnalysisHelper.scala:74)
>       at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperators(LogicalPlan.scala:30)
>       at org.apache.spark.sql.execution.datasources.FindDataSourceTable.apply0(DataSourceStrategy.scala:342)
>       at org.apache.spark.sql.execution.datasources.FindDataSourceTable.apply(DataSourceStrategy.scala:336)
>       at org.apache.spark.sql.execution.datasources.FindDataSourceTable.apply(DataSourceStrategy.scala:248)
>       at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$3(RuleExecutor.scala:221)
>       at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
>       at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:221)
>       at scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126)
>       at scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122)
>       at scala.collection.immutable.List.foldLeft(List.scala:89)
>       at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:218)
>       at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:210)
>       at scala.collection.immutable.List.foreach(List.scala:392)
>       at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:210)
>       at org.apache.spark.sql.catalyst.analysis.Analyzer.org$apache$spark$sql$catalyst$analysis$Analyzer$$executeSameContext(Analyzer.scala:251)
>       at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:245)
>       at org.apache.spark.sql.catalyst.analysis.Analyzer.execute(Analyzer.scala:207)
>       at org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:188)
>       at org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:109)
>       at org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:188)
>       at org.apache.spark.sql.catalyst.analysis.Analyzer.$anonfun$executeAndCheck$1(Analyzer.scala:228)
>       at org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper$.markInAnalyzer(AnalysisHelper.scala:323)
>       at org.apache.spark.sql.catalyst.analysis.Analyzer.executeAndCheck(Analyzer.scala:227)
>       at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:96)
>       at com.databricks.spark.util.FrameProfiler$.record(FrameProfiler.scala:80)
>       at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:134)
>       at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:178)
>       at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:852)
>       at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:178)
>       at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:97)
>       at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:94)
>       at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:86)
>       at org.apache.spark.sql.Dataset$.$anonfun$ofRows$1(Dataset.scala:94)
>       at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:852)
>       at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:92)
>       at org.apache.spark.sql.SparkSession.table(SparkSession.scala:668)
>       at org.apache.spark.sql.internal.CatalogImpl.refreshTable(CatalogImpl.scala:548)
>       at org.apache.spark.sql.execution.command.AlterTableRecoverPartitionsCommand.run(ddl.scala:714)
>       at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:79)
> {code}
> The same command worked in previous Spark versions.
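For context, below is a minimal sketch in Scala of the command sequence involved. The report contains no reproduction steps, so the table name, schema, and local-mode session are illustrative assumptions. What the trace above does show is the code path: AlterTableRecoverPartitionsCommand.run calls CatalogImpl.refreshTable, which resolves the table again via SparkSession.table, so the full analyzer (including FindDataSourceTable's format check) re-runs during the refresh, and the REPAIR command fails whenever that re-resolution throws.

{code:scala}
import org.apache.spark.sql.SparkSession

// Hypothetical sketch for SPARK-35935. Table name, schema, and master are
// assumptions for illustration; the JIRA report gives no reproduction steps.
object RepairTableSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      .appName("SPARK-35935")
      .enableHiveSupport() // MSCK REPAIR TABLE needs a Hive-compatible catalog
      .getOrCreate()

    // A partitioned table tracked by the catalog.
    spark.sql("CREATE TABLE tbl (col INT, part INT) USING parquet PARTITIONED BY (part)")
    spark.sql("INSERT INTO tbl PARTITION (part = 0) SELECT 0")

    // Partition recovery itself succeeds; per the trace above, the follow-up
    // refresh (CatalogImpl.refreshTable -> SparkSession.table) re-runs the
    // analyzer on the table relation, and the command fails if that
    // re-resolution throws an AnalysisException.
    spark.sql("MSCK REPAIR TABLE tbl")

    spark.stop()
  }
}
{code}

Note that this sketch only traces the failing code path; reproducing the exact "Incompatible format detected" error would additionally require the Delta format check seen at the top of the trace, which comes from the Databricks runtime rather than vanilla Spark.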


