This is an automated email from the ASF dual-hosted git repository.
lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git
The following commit(s) were added to refs/heads/master by this push:
     new e2717c9aae [spark] Avoid partition to be negative when partition.hashCode() equals to Integer.MIN_VALUE (#5662)
e2717c9aae is described below
commit e2717c9aae133ae2bbe26ac010c06ea7e6db0bb4
Author: Zouxxyy <[email protected]>
AuthorDate: Tue May 27 07:51:30 2025 +0800
    [spark] Avoid partition to be negative when partition.hashCode() equals to Integer.MIN_VALUE (#5662)
---
.../scala/org/apache/paimon/spark/commands/PaimonSparkWriter.scala | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonSparkWriter.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonSparkWriter.scala
index 8d12d79e8d..23e90df92b 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonSparkWriter.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/commands/PaimonSparkWriter.scala
@@ -404,14 +404,14 @@ case class PaimonSparkWriter(table: FileStoreTable) {
row => {
val bytes: Array[Byte] =
SerializationUtils.serializeBinaryRow(bootstrapSer.toBinaryRow(row))
-          (Math.abs(keyPartProject(row).hashCode()), (KeyPartOrRow.KEY_PART, bytes))
+          (keyPartProject(row).hashCode(), (KeyPartOrRow.KEY_PART, bytes))
}) ++ iter.map(
r => {
val sparkRow =
           new SparkRow(rowType, r, SparkRowUtils.getRowKind(r, rowKindColIdx))
val bytes: Array[Byte] =
SerializationUtils.serializeBinaryRow(rowSer.toBinaryRow(sparkRow))
-          (Math.abs(rowProject(sparkRow).hashCode()), (KeyPartOrRow.ROW, bytes))
+          (rowProject(sparkRow).hashCode(), (KeyPartOrRow.ROW, bytes))
})
}
}
@@ -471,6 +471,6 @@ case class PaimonSparkWriter(table: FileStoreTable) {
private case class ModPartitioner(partitions: Int) extends Partitioner {
override def numPartitions: Int = partitions
-    override def getPartition(key: Any): Int = key.asInstanceOf[Int] % numPartitions
+    override def getPartition(key: Any): Int = Math.abs(key.asInstanceOf[Int] % numPartitions)
}
}