This is an automated email from the ASF dual-hosted git repository.
felixybw pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git
The following commit(s) were added to refs/heads/main by this push:
new 1e06169cd [GLUTEN-6151] Reset local property after finishing write operator (#6163)
1e06169cd is described below
commit 1e06169cde0c6a22dc36d7c0af2a401bd73e1701
Author: JiaKe <[email protected]>
AuthorDate: Sat Jun 22 02:58:13 2024 +0800
[GLUTEN-6151] Reset local property after finishing write operator (#6163)
quick bug fix. Need to reset local property on fallback
---
.../datasources/GlutenWriterColumnarRules.scala | 33 ++++++++++++++--------
1 file changed, 21 insertions(+), 12 deletions(-)
diff --git a/gluten-core/src/main/scala/org/apache/spark/sql/execution/datasources/GlutenWriterColumnarRules.scala b/gluten-core/src/main/scala/org/apache/spark/sql/execution/datasources/GlutenWriterColumnarRules.scala
index f9ad5201d..7063c3f67 100644
--- a/gluten-core/src/main/scala/org/apache/spark/sql/execution/datasources/GlutenWriterColumnarRules.scala
+++ b/gluten-core/src/main/scala/org/apache/spark/sql/execution/datasources/GlutenWriterColumnarRules.scala
@@ -162,19 +162,28 @@ object GlutenWriterColumnarRules {
if write.getClass.getName == NOOP_WRITE &&
BackendsApiManager.getSettings.enableNativeWriteFiles() =>
injectFakeRowAdaptor(rc, rc.child)
- case rc @ DataWritingCommandExec(cmd, child)
-   if BackendsApiManager.getSettings.supportNativeWrite(child.output.toStructType.fields) =>
- val format = getNativeFormat(cmd)
- session.sparkContext.setLocalProperty(
- "staticPartitionWriteOnly",
- BackendsApiManager.getSettings.staticPartitionWriteOnly().toString)
- // FIXME: We should only use context property if having no other approaches.
- // Should see if there is another way to pass these options.
- session.sparkContext.setLocalProperty("isNativeAppliable", format.isDefined.toString)
- session.sparkContext.setLocalProperty("nativeFormat", format.getOrElse(""))
- if (format.isDefined) {
- injectFakeRowAdaptor(rc, child)
+ case rc @ DataWritingCommandExec(cmd, child) =>
+ if (BackendsApiManager.getSettings.supportNativeWrite(child.output.toStructType.fields)) {
+ val format = getNativeFormat(cmd)
+ session.sparkContext.setLocalProperty(
+ "staticPartitionWriteOnly",
+ BackendsApiManager.getSettings.staticPartitionWriteOnly().toString)
+ // FIXME: We should only use context property if having no other approaches.
+ // Should see if there is another way to pass these options.
+ session.sparkContext.setLocalProperty("isNativeAppliable", format.isDefined.toString)
+ session.sparkContext.setLocalProperty("nativeFormat", format.getOrElse(""))
+ if (format.isDefined) {
+ injectFakeRowAdaptor(rc, child)
+ } else {
+ rc.withNewChildren(rc.children.map(apply))
+ }
} else {
+ session.sparkContext.setLocalProperty(
+ "staticPartitionWriteOnly",
+ BackendsApiManager.getSettings.staticPartitionWriteOnly().toString)
+ session.sparkContext.setLocalProperty("isNativeAppliable", "false")
+ session.sparkContext.setLocalProperty("nativeFormat", "")
+
rc.withNewChildren(rc.children.map(apply))
}
case plan: SparkPlan => plan.withNewChildren(plan.children.map(apply))
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]