This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git


The following commit(s) were added to refs/heads/master by this push:
     new 9f999a7142 [spark] Fix the IOManager not work in spark reader (#6401)
9f999a7142 is described below

commit 9f999a714218e5557fb768d59a8eceb16d9e665e
Author: WenjunMin <[email protected]>
AuthorDate: Tue Oct 14 21:33:08 2025 +0800

    [spark] Fix the IOManager not work in spark reader (#6401)
---
 .../src/main/scala/org/apache/paimon/spark/PaimonPartitionReader.scala  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonPartitionReader.scala b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonPartitionReader.scala
index a3ff4cf61e..582e946dda 100644
--- a/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonPartitionReader.scala
+++ b/paimon-spark/paimon-spark-common/src/main/scala/org/apache/paimon/spark/PaimonPartitionReader.scala
@@ -44,10 +44,10 @@ case class PaimonPartitionReader(
 ) extends PartitionReader[InternalRow] {
 
   private val splits: Iterator[Split] = partition.splits.toIterator
-  @Nullable private var currentRecordReader = readSplit()
   private var advanced = false
   private var currentRow: PaimonInternalRow = _
   private val ioManager: IOManager = createIOManager()
+  @Nullable private var currentRecordReader = readSplit()
   private val sparkRow: SparkInternalRow = {
     val dataFields = new JList(readBuilder.readType().getFields)
     dataFields.addAll(metadataColumns.map(_.toPaimonDataField).asJava)

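Context for the reordering above: Scala evaluates constructor field initializers in declaration order, so when currentRecordReader = readSplit() was declared before ioManager, readSplit() ran while ioManager was still null and the first reader was built without an IOManager; declaring it after createIOManager() avoids that. Below is a minimal standalone sketch of the pitfall, with illustrative class and field names rather than the actual Paimon code:

// Hypothetical sketch of the initialization-order pitfall this commit fixes.
// Scala runs constructor field initializers top to bottom, so a field read by
// an earlier initializer is still null at that point.
object InitOrderDemo {

  class BrokenReader {
    // readSplit() runs before ioManager is assigned, so it observes null
    val currentRecordReader: String = readSplit()
    val ioManager: String = "io-manager"
    private def readSplit(): String = s"reader(ioManager=$ioManager)"
  }

  class FixedReader {
    // ioManager is initialized first, so readSplit() sees the real value
    val ioManager: String = "io-manager"
    val currentRecordReader: String = readSplit()
    private def readSplit(): String = s"reader(ioManager=$ioManager)"
  }

  def main(args: Array[String]): Unit = {
    println(new BrokenReader().currentRecordReader) // reader(ioManager=null)
    println(new FixedReader().currentRecordReader)  // reader(ioManager=io-manager)
  }
}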