[ https://issues.apache.org/jira/browse/SPARK-34236?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Kent Yao updated SPARK-34236: ----------------------------- Summary: v2 Overwrite w/ null static partition raise Cannot translate expression to source filter: null (was: v2 Overwrite will null static partition raise Cannot translate expression to source filter: null ) > v2 Overwrite w/ null static partition raise Cannot translate expression to > source filter: null > ----------------------------------------------------------------------------------------------- > > Key: SPARK-34236 > URL: https://issues.apache.org/jira/browse/SPARK-34236 > Project: Spark > Issue Type: Bug > Components: SQL > Affects Versions: 3.1.0 > Reporter: Kent Yao > Priority: Major > > {code:java} > SPARK-34223: static partition with null raise NPE *** FAILED *** (19 > milliseconds) > [info] org.apache.spark.sql.AnalysisException: Cannot translate expression > to source filter: null > [info] at > org.apache.spark.sql.execution.datasources.v2.V2Writes$$anonfun$apply$1.$anonfun$applyOrElse$1(V2Writes.scala:50) > [info] at scala.collection.immutable.List.flatMap(List.scala:366) > [info] at > org.apache.spark.sql.execution.datasources.v2.V2Writes$$anonfun$apply$1.applyOrElse(V2Writes.scala:47) > [info] at > org.apache.spark.sql.execution.datasources.v2.V2Writes$$anonfun$apply$1.applyOrElse(V2Writes.scala:39) > [info] at > org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDown$1(TreeNode.scala:317) > [info] at > org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:73) > [info] at > org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:317) > [info] at > org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDown(LogicalPlan.scala:29) > [info] at > org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown(AnalysisHelper.scala:171) > [info] at > org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDown$(AnalysisHelper.scala:169) > [info] at 
> org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDown(LogicalPlan.scala:29) > [info] at > org.apache.spark.sql.execution.datasources.v2.V2Writes$.apply(V2Writes.scala:39) > [info] at > org.apache.spark.sql.execution.datasources.v2.V2Writes$.apply(V2Writes.scala:35) > [info] at > org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$2(RuleExecutor.scala:216) > [info] at > scala.collection.LinearSeqOptimized.foldLeft(LinearSeqOptimized.scala:126) > [info] at > scala.collection.LinearSeqOptimized.foldLeft$(LinearSeqOptimized.scala:122) > [info] at scala.collection.immutable.List.foldLeft(List.scala:91) > [info] at > org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1(RuleExecutor.scala:213) > [info] at > org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$execute$1$adapted(RuleExecutor.scala:205) > [info] at scala.collection.immutable.List.foreach(List.scala:431) > [info] at > org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:205) > [info] at > org.apache.spark.sql.catalyst.rules.RuleExecutor.$anonfun$executeAndTrack$1(RuleExecutor.scala:183) > [info] at > org.apache.spark.sql.catalyst.QueryPlanningTracker$.withTracker(QueryPlanningTracker.scala:88) > [info] at > org.apache.spark.sql.catalyst.rules.RuleExecutor.executeAndTrack(RuleExecutor.scala:183) > [info] at > org.apache.spark.sql.execution.QueryExecution.$anonfun$optimizedPlan$1(QueryExecution.scala:87) > [info] at > org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:111) > [info] at > org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:143) > [info] at > org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772) > [info] at > org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:143) > [info] at > org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:84) > [info] at > 
org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:84) > [info] at > org.apache.spark.sql.execution.QueryExecution.assertOptimized(QueryExecution.scala:95) > [info] at > org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:113) > [info] at > org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:110) > [info] at > org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:101) > [info] at > org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:163) > [info] at > org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:90) > [info] at > org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772) > [info] at > org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64) > [info] at org.apache.spark.sql.Dataset.withAction(Dataset.scala:3697) > [info] at org.apache.spark.sql.Dataset.<init>(Dataset.scala:228) > [info] at org.apache.spark.sql.Dataset$.$anonfun$ofRows$2(Dataset.scala:99) > [info] at > org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772) > [info] at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:96) > [info] at > org.apache.spark.sql.SparkSession.$anonfun$sql$1(SparkSession.scala:615) > [info] at > org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:772) > [info] at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:610) > [info] at > org.apache.spark.sql.test.SQLTestUtilsBase.$anonfun$sql$1(SQLTestUtils.scala:231) > [info] at > org.apache.spark.sql.SQLInsertTestSuite.$anonfun$$init$$42(SQLInsertTestSuite.scala:207) > [info] at > scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) > [info] at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1437) > [info] at > org.apache.spark.sql.test.SQLTestUtilsBase.withTable(SQLTestUtils.scala:305) > [info] at > 
org.apache.spark.sql.test.SQLTestUtilsBase.withTable$(SQLTestUtils.scala:303) > [info] at > org.apache.spark.sql.DSV2SQLInsertTestSuite.withTable(SQLInsertTestSuite.scala:220) > [info] at > org.apache.spark.sql.SQLInsertTestSuite.$anonfun$$init$$41(SQLInsertTestSuite.scala:205) > [info] at > scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23) > [info] at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85) > [info] at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83) > [info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) > [info] at org.scalatest.Transformer.apply(Transformer.scala:22) > [info] at org.scalatest.Transformer.apply(Transformer.scala:20) > [info] at > org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:190) > [info] at > org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:176) > [info] at > org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:188) > [info] at > org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:200) > [info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306) > [info] at > org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:200) > [info] at > org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:182) > [info] at > org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:61) > [info] at > org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234) > [info] at > org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227) > [info] at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:61) > [info] at > org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:233) > [info] at > org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413) > [info] at scala.collection.immutable.List.foreach(List.scala:431) > [info] at 
org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401) > [info] at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396) > [info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475) > {code} -- This message was sent by Atlassian Jira (v8.3.4#803005) --------------------------------------------------------------------- To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org For additional commands, e-mail: issues-help@spark.apache.org