This is an automated email from the ASF dual-hosted git repository.
lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git
The following commit(s) were added to refs/heads/master by this push:
new b22db6598c [spark] Introduce BucketExpression.quote to unify quote
b22db6598c is described below
commit b22db6598ce5c7250ca63a1152a3c200d69b9298
Author: JingsongLi <[email protected]>
AuthorDate: Tue Jun 17 15:36:10 2025 +0800
[spark] Introduce BucketExpression.quote to unify quote
---
.../src/main/scala/org/apache/paimon/spark/PaimonScan.scala | 4 ++--
.../org/apache/paimon/spark/commands/BucketExpression.scala | 10 +++-------
.../org/apache/paimon/spark/write/PaimonWriteRequirement.scala | 6 ++----
3 files changed, 7 insertions(+), 13 deletions(-)
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonScan.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonScan.scala
index 6efdfe20f6..0bbdab39d8 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonScan.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonScan.scala
@@ -19,9 +19,9 @@
package org.apache.paimon.spark
import org.apache.paimon.predicate.Predicate
+import org.apache.paimon.spark.commands.BucketExpression.quote
import org.apache.paimon.table.{BucketMode, FileStoreTable, Table}
import org.apache.paimon.table.source.{DataSplit, Split}
-import org.apache.paimon.utils.StringUtils
import org.apache.spark.sql.PaimonUtils.fieldReference
import org.apache.spark.sql.connector.expressions._
@@ -67,7 +67,7 @@ case class PaimonScan(
case Some(num) =>
val bucketKey = bucketSpec.getBucketKeys.get(0)
if (requiredSchema.exists(f => conf.resolver(f.name, bucketKey))) {
- Some(Expressions.bucket(num, StringUtils.quote(bucketKey)))
+ Some(Expressions.bucket(num, quote(bucketKey)))
} else {
None
}
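Note on the call site above: Expressions.bucket(num, ...) takes raw column-name strings that Spark parses as (possibly multipart) identifiers, which is why the bucket key must be backtick-quoted before it is handed over. A minimal sketch of the pattern; the key and bucket count here are illustrative, not taken from the patch:

    import org.apache.paimon.spark.commands.BucketExpression.quote
    import org.apache.spark.sql.connector.expressions.{Expressions, Transform}

    // Quote the key so a name containing backticks or dots still parses as a
    // single column reference, then build the bucket transform from it.
    val bucketKey: String = "user`id" // illustrative key, not from the patch
    val transform: Transform = Expressions.bucket(16, quote(bucketKey))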
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/BucketExpression.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/BucketExpression.scala
index 2a23170c50..ca22879fdb 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/BucketExpression.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/BucketExpression.scala
@@ -18,21 +18,16 @@
package org.apache.paimon.spark.commands
-import org.apache.paimon.data.serializer.InternalRowSerializer
-import org.apache.paimon.spark.SparkInternalRowWrapper
-import org.apache.paimon.spark.SparkTypeUtils.toPaimonType
import org.apache.paimon.spark.catalog.functions.PaimonFunctions
import org.apache.paimon.spark.catalog.functions.PaimonFunctions.BUCKET
-import org.apache.paimon.table.sink.KeyAndBucketExtractor.{bucket, bucketKeyHashCode}
-import org.apache.paimon.types.{RowKind, RowType}
import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow => SparkInternalRow}
import org.apache.spark.sql.catalyst.analysis.FunctionRegistry.FunctionBuilder
import org.apache.spark.sql.catalyst.analysis.FunctionRegistryBase
-import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionInfo, Literal, SpecificInternalRow}
+import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionInfo, SpecificInternalRow}
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.connector.catalog.functions.ScalarFunction
-import org.apache.spark.sql.types.{DataType, DataTypes, StructField, StructType}
+import org.apache.spark.sql.types.{DataType, StructField, StructType}
/**
 * The reason for adding it is that the current spark_catalog cannot access v2 functions, which
@@ -99,4 +94,5 @@ object BucketExpression {
(ident, info, builder)
}
+ def quote(columnName: String): String = s"`${columnName.replace("`", "``")}`"
}
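The new shared helper follows Spark SQL's identifier-quoting rule: wrap the name in backticks and escape any embedded backtick by doubling it. A self-contained sketch of the same logic (the demo object name is made up for illustration; only the quote body comes from the patch):

    object QuoteDemo {
      // Same logic as BucketExpression.quote: double any embedded backtick,
      // then wrap the whole name in backticks.
      def quote(columnName: String): String = s"`${columnName.replace("`", "``")}`"

      def main(args: Array[String]): Unit = {
        println(quote("user_id")) // prints `user_id`
        println(quote("a`b"))     // prints `a``b`
      }
    }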
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/write/PaimonWriteRequirement.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/write/PaimonWriteRequirement.scala
index 1a346deeec..097dbf7907 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/write/PaimonWriteRequirement.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/write/PaimonWriteRequirement.scala
@@ -18,8 +18,9 @@
package org.apache.paimon.spark.write
-import org.apache.paimon.table.{BucketMode, FileStoreTable}
+import org.apache.paimon.spark.commands.BucketExpression.quote
import org.apache.paimon.table.BucketMode._
+import org.apache.paimon.table.FileStoreTable
import org.apache.spark.sql.connector.distributions.{ClusteredDistribution, Distribution, Distributions}
import org.apache.spark.sql.connector.expressions.{Expression, Expressions, SortOrder}
@@ -63,7 +64,4 @@ object PaimonWriteRequirement {
PaimonWriteRequirement(distribution, EMPTY_ORDERING)
}
}
-
- private def quote(columnName: String): String =
- s"`${columnName.replace("`", "``")}`"
}
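For completeness, PaimonWriteRequirement previously carried its own private copy of quote (removed above) and now reuses the shared helper when building its clustered write distribution. A hedged sketch of that usage; only quote and the imports come from the patch, while the method name and surrounding shape are assumptions:

    import org.apache.paimon.spark.commands.BucketExpression.quote
    import org.apache.spark.sql.connector.distributions.{Distribution, Distributions}
    import org.apache.spark.sql.connector.expressions.{Expression, Expressions}

    // Cluster writes by the bucket keys; quoting keeps key names containing
    // backticks valid when Spark parses the column references.
    def clusteredByBucketKeys(bucketKeys: Seq[String]): Distribution = {
      val clustering: Array[Expression] =
        bucketKeys.map(k => Expressions.column(quote(k)): Expression).toArray
      Distributions.clustered(clustering)
    }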