This is an automated email from the ASF dual-hosted git repository.
russellspitzer pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/iceberg.git
The following commit(s) were added to refs/heads/main by this push:
new e8bb8b502c Test: Add a test utility method to programmatically create
expected partition specs (#8467)
e8bb8b502c is described below
commit e8bb8b502c981f6e5dea27cc6f833655bf79f4c6
Author: roryqi <[email protected]>
AuthorDate: Wed Nov 8 04:55:41 2023 +0800
Test: Add a test utility method to programmatically create expected
partition specs (#8467)
---
.../test/java/org/apache/iceberg/TestHelpers.java | 40 +++++++++++++
.../extensions/TestAlterTablePartitionFields.java | 66 +++++++---------------
.../spark/source/TestForwardCompatibility.java | 17 +++---
.../TestMetadataTablesWithPartitionEvolution.java | 10 ++--
.../spark/source/TestSparkMetadataColumns.java | 10 ++--
.../extensions/TestAlterTablePartitionFields.java | 66 +++++++---------------
.../spark/source/TestForwardCompatibility.java | 18 +++---
.../TestMetadataTablesWithPartitionEvolution.java | 10 ++--
.../spark/source/TestSparkMetadataColumns.java | 10 ++--
.../extensions/TestAlterTablePartitionFields.java | 66 +++++++---------------
.../spark/source/TestForwardCompatibility.java | 18 +++---
.../TestMetadataTablesWithPartitionEvolution.java | 10 ++--
.../spark/source/TestSparkMetadataColumns.java | 10 ++--
.../extensions/TestAlterTablePartitionFields.java | 66 +++++++---------------
.../spark/source/TestForwardCompatibility.java | 17 +++---
.../TestMetadataTablesWithPartitionEvolution.java | 10 ++--
.../spark/source/TestSparkMetadataColumns.java | 10 ++--
17 files changed, 214 insertions(+), 240 deletions(-)
diff --git a/api/src/test/java/org/apache/iceberg/TestHelpers.java
b/api/src/test/java/org/apache/iceberg/TestHelpers.java
index 153e2de7ea..890ae8abd4 100644
--- a/api/src/test/java/org/apache/iceberg/TestHelpers.java
+++ b/api/src/test/java/org/apache/iceberg/TestHelpers.java
@@ -265,6 +265,10 @@ public class TestHelpers {
}
}
+ public static ExpectedSpecBuilder newExpectedSpecBuilder() {
+ return new ExpectedSpecBuilder();
+ }
+
public static class KryoHelpers {
private KryoHelpers() {}
@@ -667,4 +671,40 @@ public class TestHelpers {
return null;
}
}
+
+ public static class ExpectedSpecBuilder {
+ private final UnboundPartitionSpec.Builder unboundPartitionSpecBuilder;
+
+ private Schema schema;
+
+ private ExpectedSpecBuilder() {
+ this.unboundPartitionSpecBuilder = UnboundPartitionSpec.builder();
+ }
+
+ public ExpectedSpecBuilder withSchema(Schema newSchema) {
+ this.schema = newSchema;
+ return this;
+ }
+
+ public ExpectedSpecBuilder withSpecId(int newSpecId) {
+ unboundPartitionSpecBuilder.withSpecId(newSpecId);
+ return this;
+ }
+
+ public ExpectedSpecBuilder addField(
+ String transformAsString, int sourceId, int partitionId, String name) {
+      unboundPartitionSpecBuilder.addField(transformAsString, sourceId, partitionId, name);
+ return this;
+ }
+
+    public ExpectedSpecBuilder addField(String transformAsString, int sourceId, String name) {
+ unboundPartitionSpecBuilder.addField(transformAsString, sourceId, name);
+ return this;
+ }
+
+ public PartitionSpec build() {
+ Preconditions.checkNotNull(schema, "Field schema is missing");
+ return unboundPartitionSpecBuilder.build().bind(schema);
+ }
+ }
}
diff --git
a/spark/v3.2/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
b/spark/v3.2/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
index 2ecf6b0c4c..042e87c729 100644
---
a/spark/v3.2/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
+++
b/spark/v3.2/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
@@ -19,9 +19,9 @@
package org.apache.iceberg.spark.extensions;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableProperties;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.spark.SparkCatalogConfig;
import org.apache.iceberg.spark.source.SparkTable;
import org.apache.spark.sql.connector.catalog.CatalogManager;
@@ -392,17 +392,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"ts_hour\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "ts_hour")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -431,17 +425,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"hour_col\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "hour_col")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -470,17 +458,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"ts_hour\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "ts_hour")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -509,17 +491,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"hour_col\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "hour_col")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
diff --git
a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
index fe44023590..96bebf7c88 100644
---
a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
+++
b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
@@ -35,7 +35,6 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.ManifestFiles;
import org.apache.iceberg.ManifestWriter;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableProperties;
@@ -75,14 +74,18 @@ public class TestForwardCompatibility {
// create a spec for the schema that uses a "zero" transform that produces
all 0s
private static final PartitionSpec UNKNOWN_SPEC =
- PartitionSpecParser.fromJson(
- SCHEMA,
- "{ \"spec-id\": 0, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"zero\", \"source-id\": 1 } ] }");
+ org.apache.iceberg.TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(0)
+ .addField("zero", 1, "id_zero")
+ .build();
// create a fake spec to use to write table metadata
private static final PartitionSpec FAKE_SPEC =
- PartitionSpecParser.fromJson(
- SCHEMA,
- "{ \"spec-id\": 0, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"identity\", \"source-id\": 1 } ] }");
+ org.apache.iceberg.TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(0)
+ .addField("identity", 1, "id_zero")
+ .build();
@Rule public TemporaryFolder temp = new TemporaryFolder();
diff --git
a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
index a62199181a..0baaef1374 100644
---
a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
+++
b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
@@ -38,10 +38,10 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.HasTableOperations;
import org.apache.iceberg.MetadataTableType;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableOperations;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
@@ -627,9 +627,11 @@ public class TestMetadataTablesWithPartitionEvolution
extends SparkCatalogTestBa
Table table = validationCatalog.loadTable(tableIdent);
PartitionSpec unknownSpec =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{ \"spec-id\": 1, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"zero\", \"source-id\": 1 } ] }");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(1)
+ .addField("zero", 1, "id_zero")
+ .build();
// replace the table spec to include an unknown transform
TableOperations ops = ((HasTableOperations) table).operations();
diff --git
a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
index 79b755872d..5c7929112f 100644
---
a/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
+++
b/spark/v3.2/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
@@ -35,11 +35,11 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.HasTableOperations;
import org.apache.iceberg.MetadataColumns;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableOperations;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
@@ -75,9 +75,11 @@ public class TestSparkMetadataColumns extends SparkTestBase {
Types.NestedField.optional(3, "data", Types.StringType.get()));
private static final PartitionSpec SPEC = PartitionSpec.unpartitioned();
private static final PartitionSpec UNKNOWN_SPEC =
- PartitionSpecParser.fromJson(
- SCHEMA,
- "{ \"spec-id\": 1, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"zero\", \"source-id\": 1 } ] }");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(1)
+ .addField("zero", 1, "id_zero")
+ .build();
@Parameterized.Parameters(name = "fileFormat = {0}, vectorized = {1},
formatVersion = {2}")
public static Object[][] parameters() {
diff --git
a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
index 0e978e52e5..948fc462de 100644
---
a/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
+++
b/spark/v3.3/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
@@ -19,9 +19,9 @@
package org.apache.iceberg.spark.extensions;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableProperties;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.spark.SparkCatalogConfig;
import org.apache.iceberg.spark.source.SparkTable;
import org.apache.spark.sql.connector.catalog.CatalogManager;
@@ -392,17 +392,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"ts_hour\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "ts_hour")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -431,17 +425,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"hour_col\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "hour_col")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -470,17 +458,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"ts_hour\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "ts_hour")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -509,17 +491,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"hour_col\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "hour_col")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
diff --git
a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
index fe44023590..6ab9e57949 100644
---
a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
+++
b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
@@ -35,7 +35,6 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.ManifestFiles;
import org.apache.iceberg.ManifestWriter;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableProperties;
@@ -75,14 +74,19 @@ public class TestForwardCompatibility {
// create a spec for the schema that uses a "zero" transform that produces
all 0s
private static final PartitionSpec UNKNOWN_SPEC =
- PartitionSpecParser.fromJson(
- SCHEMA,
- "{ \"spec-id\": 0, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"zero\", \"source-id\": 1 } ] }");
+ org.apache.iceberg.TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(0)
+ .addField("zero", 1, "id_zero")
+ .build();
+
// create a fake spec to use to write table metadata
private static final PartitionSpec FAKE_SPEC =
- PartitionSpecParser.fromJson(
- SCHEMA,
- "{ \"spec-id\": 0, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"identity\", \"source-id\": 1 } ] }");
+ org.apache.iceberg.TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(0)
+ .addField("identity", 1, "id_zero")
+ .build();
@Rule public TemporaryFolder temp = new TemporaryFolder();
diff --git
a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
index a62199181a..0baaef1374 100644
---
a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
+++
b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
@@ -38,10 +38,10 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.HasTableOperations;
import org.apache.iceberg.MetadataTableType;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableOperations;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
@@ -627,9 +627,11 @@ public class TestMetadataTablesWithPartitionEvolution
extends SparkCatalogTestBa
Table table = validationCatalog.loadTable(tableIdent);
PartitionSpec unknownSpec =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{ \"spec-id\": 1, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"zero\", \"source-id\": 1 } ] }");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(1)
+ .addField("zero", 1, "id_zero")
+ .build();
// replace the table spec to include an unknown transform
TableOperations ops = ((HasTableOperations) table).operations();
diff --git
a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
index 79b755872d..5c7929112f 100644
---
a/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
+++
b/spark/v3.3/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
@@ -35,11 +35,11 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.HasTableOperations;
import org.apache.iceberg.MetadataColumns;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableOperations;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
@@ -75,9 +75,11 @@ public class TestSparkMetadataColumns extends SparkTestBase {
Types.NestedField.optional(3, "data", Types.StringType.get()));
private static final PartitionSpec SPEC = PartitionSpec.unpartitioned();
private static final PartitionSpec UNKNOWN_SPEC =
- PartitionSpecParser.fromJson(
- SCHEMA,
- "{ \"spec-id\": 1, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"zero\", \"source-id\": 1 } ] }");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(1)
+ .addField("zero", 1, "id_zero")
+ .build();
@Parameterized.Parameters(name = "fileFormat = {0}, vectorized = {1},
formatVersion = {2}")
public static Object[][] parameters() {
diff --git
a/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
b/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
index 0e978e52e5..948fc462de 100644
---
a/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
+++
b/spark/v3.4/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
@@ -19,9 +19,9 @@
package org.apache.iceberg.spark.extensions;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableProperties;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.spark.SparkCatalogConfig;
import org.apache.iceberg.spark.source.SparkTable;
import org.apache.spark.sql.connector.catalog.CatalogManager;
@@ -392,17 +392,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"ts_hour\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "ts_hour")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -431,17 +425,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"hour_col\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "hour_col")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -470,17 +458,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"ts_hour\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "ts_hour")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -509,17 +491,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"hour_col\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "hour_col")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
diff --git
a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
index 73e572ecae..80a8196f8a 100644
---
a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
+++
b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
@@ -34,7 +34,6 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.ManifestFiles;
import org.apache.iceberg.ManifestWriter;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableProperties;
@@ -75,14 +74,19 @@ public class TestForwardCompatibility {
// create a spec for the schema that uses a "zero" transform that produces
all 0s
private static final PartitionSpec UNKNOWN_SPEC =
- PartitionSpecParser.fromJson(
- SCHEMA,
- "{ \"spec-id\": 0, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"zero\", \"source-id\": 1 } ] }");
+ org.apache.iceberg.TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(0)
+ .addField("zero", 1, "id_zero")
+ .build();
+
// create a fake spec to use to write table metadata
private static final PartitionSpec FAKE_SPEC =
- PartitionSpecParser.fromJson(
- SCHEMA,
- "{ \"spec-id\": 0, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"identity\", \"source-id\": 1 } ] }");
+ org.apache.iceberg.TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(0)
+ .addField("identity", 1, "id_zero")
+ .build();
@Rule public TemporaryFolder temp = new TemporaryFolder();
diff --git
a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
index b4b60dc02d..ea65fead10 100644
---
a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
+++
b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
@@ -37,10 +37,10 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.HasTableOperations;
import org.apache.iceberg.MetadataTableType;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableOperations;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
@@ -627,9 +627,11 @@ public class TestMetadataTablesWithPartitionEvolution
extends SparkCatalogTestBa
Table table = validationCatalog.loadTable(tableIdent);
PartitionSpec unknownSpec =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{ \"spec-id\": 1, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"zero\", \"source-id\": 1 } ] }");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(1)
+ .addField("zero", 1, "id_zero")
+ .build();
// replace the table spec to include an unknown transform
TableOperations ops = ((HasTableOperations) table).operations();
diff --git
a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
index 8a2fae8117..b2361c188c 100644
---
a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
+++
b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
@@ -34,11 +34,11 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.HasTableOperations;
import org.apache.iceberg.MetadataColumns;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableOperations;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
@@ -75,9 +75,11 @@ public class TestSparkMetadataColumns extends SparkTestBase {
Types.NestedField.optional(3, "data", Types.StringType.get()));
private static final PartitionSpec SPEC = PartitionSpec.unpartitioned();
private static final PartitionSpec UNKNOWN_SPEC =
- PartitionSpecParser.fromJson(
- SCHEMA,
- "{ \"spec-id\": 1, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"zero\", \"source-id\": 1 } ] }");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(1)
+ .addField("zero", 1, "id_zero")
+ .build();
@Parameterized.Parameters(name = "fileFormat = {0}, vectorized = {1},
formatVersion = {2}")
public static Object[][] parameters() {
diff --git
a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
index 0e978e52e5..948fc462de 100644
---
a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
+++
b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestAlterTablePartitionFields.java
@@ -19,9 +19,9 @@
package org.apache.iceberg.spark.extensions;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableProperties;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.spark.SparkCatalogConfig;
import org.apache.iceberg.spark.source.SparkTable;
import org.apache.spark.sql.connector.catalog.CatalogManager;
@@ -392,17 +392,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"ts_hour\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "ts_hour")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -431,17 +425,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"hour_col\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "hour_col")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -470,17 +458,11 @@ public class TestAlterTablePartitionFields extends
SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"ts_hour\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "ts_hour")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
@@ -509,17 +491,11 @@ public class TestAlterTablePartitionFields extends SparkExtensionsTestBase {
.build();
} else {
expected =
- PartitionSpecParser.fromJson(
- table.schema(),
- "{\n"
- + " \"spec-id\" : 2,\n"
- + " \"fields\" : [ {\n"
- + " \"name\" : \"hour_col\",\n"
- + " \"transform\" : \"hour\",\n"
- + " \"source-id\" : 3,\n"
- + " \"field-id\" : 1001\n"
- + " } ]\n"
- + "}");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(2)
+ .addField("hour", 3, 1001, "hour_col")
+ .build();
}
Assert.assertEquals(
"Should changed from daily to hourly partitioned field", expected,
table.spec());
diff --git
a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
index 73e572ecae..446989d1af 100644
---
a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
+++
b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestForwardCompatibility.java
@@ -34,7 +34,6 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.ManifestFiles;
import org.apache.iceberg.ManifestWriter;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableProperties;
@@ -75,14 +74,18 @@ public class TestForwardCompatibility {
// create a spec for the schema that uses a "zero" transform that produces all 0s
private static final PartitionSpec UNKNOWN_SPEC =
- PartitionSpecParser.fromJson(
- SCHEMA,
- "{ \"spec-id\": 0, \"fields\": [ { \"name\": \"id_zero\",
\"transform\": \"zero\", \"source-id\": 1 } ] }");
+ org.apache.iceberg.TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(0)
+ .addField("zero", 1, "id_zero")
+ .build();
// create a fake spec to use to write table metadata
private static final PartitionSpec FAKE_SPEC =
-     PartitionSpecParser.fromJson(
-         SCHEMA,
-         "{ \"spec-id\": 0, \"fields\": [ { \"name\": \"id_zero\", \"transform\": \"identity\", \"source-id\": 1 } ] }");
+ org.apache.iceberg.TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(0)
+ .addField("identity", 1, "id_zero")
+ .build();
@Rule public TemporaryFolder temp = new TemporaryFolder();
diff --git
a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
index b4b60dc02d..ea65fead10 100644
---
a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
+++
b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestMetadataTablesWithPartitionEvolution.java
@@ -37,10 +37,10 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.HasTableOperations;
import org.apache.iceberg.MetadataTableType;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableOperations;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
@@ -627,9 +627,11 @@ public class TestMetadataTablesWithPartitionEvolution extends SparkCatalogTestBa
Table table = validationCatalog.loadTable(tableIdent);
PartitionSpec unknownSpec =
-        PartitionSpecParser.fromJson(
-            table.schema(),
-            "{ \"spec-id\": 1, \"fields\": [ { \"name\": \"id_zero\", \"transform\": \"zero\", \"source-id\": 1 } ] }");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(table.schema())
+ .withSpecId(1)
+ .addField("zero", 1, "id_zero")
+ .build();
// replace the table spec to include an unknown transform
TableOperations ops = ((HasTableOperations) table).operations();
diff --git
a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
index 8a2fae8117..b2361c188c 100644
---
a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
+++
b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkMetadataColumns.java
@@ -34,11 +34,11 @@ import org.apache.iceberg.FileFormat;
import org.apache.iceberg.HasTableOperations;
import org.apache.iceberg.MetadataColumns;
import org.apache.iceberg.PartitionSpec;
-import org.apache.iceberg.PartitionSpecParser;
import org.apache.iceberg.Schema;
import org.apache.iceberg.Table;
import org.apache.iceberg.TableMetadata;
import org.apache.iceberg.TableOperations;
+import org.apache.iceberg.TestHelpers;
import org.apache.iceberg.exceptions.ValidationException;
import org.apache.iceberg.expressions.Expressions;
import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
@@ -75,9 +75,11 @@ public class TestSparkMetadataColumns extends SparkTestBase {
Types.NestedField.optional(3, "data", Types.StringType.get()));
private static final PartitionSpec SPEC = PartitionSpec.unpartitioned();
private static final PartitionSpec UNKNOWN_SPEC =
-      PartitionSpecParser.fromJson(
-          SCHEMA,
-          "{ \"spec-id\": 1, \"fields\": [ { \"name\": \"id_zero\", \"transform\": \"zero\", \"source-id\": 1 } ] }");
+ TestHelpers.newExpectedSpecBuilder()
+ .withSchema(SCHEMA)
+ .withSpecId(1)
+ .addField("zero", 1, "id_zero")
+ .build();
@Parameterized.Parameters(name = "fileFormat = {0}, vectorized = {1}, formatVersion = {2}")
public static Object[][] parameters() {