This is an automated email from the ASF dual-hosted git repository.
etudenhoefner pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/iceberg.git
The following commit(s) were added to refs/heads/main by this push:
new 8c44cccf0d Spark 3.5,4.0: Fix test parameters (#14376)
8c44cccf0d is described below
commit 8c44cccf0dc3e7dcd9df53b5c1e663aaf82f46d6
Author: Tom Tanaka <[email protected]>
AuthorDate: Mon Oct 20 15:29:51 2025 +0900
Spark 3.5,4.0: Fix test parameters (#14376)
---
.../spark/extensions/TestRewritePositionDeleteFiles.java | 13 +++++++++----
.../spark/extensions/TestRewritePositionDeleteFiles.java | 13 +++++++++----
2 files changed, 18 insertions(+), 8 deletions(-)
diff --git a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewritePositionDeleteFiles.java b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewritePositionDeleteFiles.java
index 5dee0642bb..ad49da87be 100644
--- a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewritePositionDeleteFiles.java
+++ b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewritePositionDeleteFiles.java
@@ -44,6 +44,7 @@ import org.apache.iceberg.FileScanTask;
import org.apache.iceberg.Files;
import org.apache.iceberg.MetadataTableType;
import org.apache.iceberg.MetadataTableUtils;
+import org.apache.iceberg.Parameter;
import org.apache.iceberg.Parameters;
import org.apache.iceberg.PositionDeletesScanTask;
import org.apache.iceberg.RowDelta;
@@ -85,17 +86,21 @@ public class TestRewritePositionDeleteFiles extends ExtensionsTestBase {
private static final int DELETE_FILES_PER_PARTITION = 2;
private static final int DELETE_FILE_SIZE = 10;
- @Parameters(name = "formatVersion = {0}, catalogName = {1}, implementation = {2}, config = {3}")
+ @Parameters(name = "catalogName = {0}, implementation = {1}, config = {2}, formatVersion = {3}")
public static Object[][] parameters() {
return new Object[][] {
{
SparkCatalogConfig.HIVE.catalogName(),
SparkCatalogConfig.HIVE.implementation(),
- CATALOG_PROPS
+ CATALOG_PROPS,
+ 2
}
};
}
+ @Parameter(index = 3)
+ private int formatVersion;
+
@AfterEach
public void cleanup() {
sql("DROP TABLE IF EXISTS %s", tableName);
@@ -247,8 +252,8 @@ public class TestRewritePositionDeleteFiles extends ExtensionsTestBase {
"CREATE TABLE %s (id long, %s %s, c1 string, c2 string) "
+ "USING iceberg "
+ "PARTITIONED BY (%s) "
- + "TBLPROPERTIES('format-version'='2')",
- tableName, partitionCol, partitionType, partitionTransform);
+ + "TBLPROPERTIES('format-version'='%d')",
+ tableName, partitionCol, partitionType, partitionTransform, formatVersion);
}
private void insertData(Function<Integer, ?> partitionValueFunction) throws Exception {
diff --git a/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewritePositionDeleteFiles.java b/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewritePositionDeleteFiles.java
index 5dee0642bb..ad49da87be 100644
--- a/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewritePositionDeleteFiles.java
+++ b/spark/v4.0/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestRewritePositionDeleteFiles.java
@@ -44,6 +44,7 @@ import org.apache.iceberg.FileScanTask;
import org.apache.iceberg.Files;
import org.apache.iceberg.MetadataTableType;
import org.apache.iceberg.MetadataTableUtils;
+import org.apache.iceberg.Parameter;
import org.apache.iceberg.Parameters;
import org.apache.iceberg.PositionDeletesScanTask;
import org.apache.iceberg.RowDelta;
@@ -85,17 +86,21 @@ public class TestRewritePositionDeleteFiles extends ExtensionsTestBase {
private static final int DELETE_FILES_PER_PARTITION = 2;
private static final int DELETE_FILE_SIZE = 10;
- @Parameters(name = "formatVersion = {0}, catalogName = {1}, implementation = {2}, config = {3}")
+ @Parameters(name = "catalogName = {0}, implementation = {1}, config = {2}, formatVersion = {3}")
public static Object[][] parameters() {
return new Object[][] {
{
SparkCatalogConfig.HIVE.catalogName(),
SparkCatalogConfig.HIVE.implementation(),
- CATALOG_PROPS
+ CATALOG_PROPS,
+ 2
}
};
}
+ @Parameter(index = 3)
+ private int formatVersion;
+
@AfterEach
public void cleanup() {
sql("DROP TABLE IF EXISTS %s", tableName);
@@ -247,8 +252,8 @@ public class TestRewritePositionDeleteFiles extends ExtensionsTestBase {
"CREATE TABLE %s (id long, %s %s, c1 string, c2 string) "
+ "USING iceberg "
+ "PARTITIONED BY (%s) "
- + "TBLPROPERTIES('format-version'='2')",
- tableName, partitionCol, partitionType, partitionTransform);
+ + "TBLPROPERTIES('format-version'='%d')",
+ tableName, partitionCol, partitionType, partitionTransform, formatVersion);
}
private void insertData(Function<Integer, ?> partitionValueFunction) throws Exception {
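
For reference, a minimal sketch of how the index-based parameterization lines up after this fix. The {0}..{3} placeholders in @Parameters(name = ...), the columns of the Object[][] returned by parameters(), and the @Parameter(index = ...) fields all refer to the same positions, which is why formatVersion is named as {3}, supplied as the fourth column, and injected at index 3. This sketch is not part of the commit; the comments mapping slots {0}-{2} to catalog fields assume those fields are declared in the test base class.

  // Sketch only: index-based parameterization as used in this test class.
  @Parameters(name = "catalogName = {0}, implementation = {1}, config = {2}, formatVersion = {3}")
  public static Object[][] parameters() {
    return new Object[][] {
      {
        SparkCatalogConfig.HIVE.catalogName(),    // slot {0}
        SparkCatalogConfig.HIVE.implementation(), // slot {1}
        CATALOG_PROPS,                            // slot {2}
        2                                         // slot {3}, the table format version
      }
    };
  }

  // Injected from slot {3}; createTable() previously hard-coded
  // 'format-version'='2' and now interpolates this field instead.
  @Parameter(index = 3)
  private int formatVersion;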