pvary commented on code in PR #15633:
URL: https://github.com/apache/iceberg/pull/15633#discussion_r2959083740
##########
data/src/test/java/org/apache/iceberg/data/BaseFormatModelTests.java:
##########
@@ -317,7 +321,316 @@ void
testPositionDeleteWriterEngineWriteGenericRead(FileFormat fileFormat) throw
DataTestHelpers.assertEquals(positionDeleteSchema.asStruct(), records,
readRecords);
}
+ @ParameterizedTest
+ @FieldSource("FILE_FORMATS")
+ /* Write with Generic Record, read with projected engine type T (narrow
schema) */
+ void testReaderBuilderProjection(FileFormat fileFormat) throws IOException {
+ DataGenerator dataGenerator = new DataGenerators.DefaultSchema();
+ Schema fullSchema = dataGenerator.schema();
+
+ List<Types.NestedField> columns = fullSchema.columns();
+ List<Types.NestedField> projectedColumns =
+ IntStream.range(0, columns.size())
+ .filter(i -> i % 2 == 1)
+ .mapToObj(columns::get)
+ .collect(Collectors.toList());
+ if (projectedColumns.isEmpty()) {
+ projectedColumns = ImmutableList.of(columns.get(columns.size() - 1));
+ }
+
+ Schema projectedSchema = new Schema(projectedColumns);
+
+ List<Record> genericRecords = dataGenerator.generateRecords();
+ writeGenericRecords(fileFormat, fullSchema, genericRecords);
+
+ List<Record> projectedGenericRecords = projectRecords(genericRecords,
projectedSchema);
+ List<T> expectedEngineRecords =
+ convertToEngineRecords(projectedGenericRecords, projectedSchema);
+
+ InputFile inputFile = encryptedFile.encryptingOutputFile().toInputFile();
+ List<T> readRecords;
+ try (CloseableIterable<T> reader =
+ FormatModelRegistry.readBuilder(fileFormat, engineType(), inputFile)
+ .project(projectedSchema)
+ .engineProjection(engineSchema(projectedSchema))
+ .build()) {
+ readRecords = ImmutableList.copyOf(reader);
+ }
+
+ assertEquals(projectedSchema, expectedEngineRecords, readRecords);
+ }
+
+ @ParameterizedTest
+ @FieldSource("FILE_FORMATS")
+ void testReaderBuilderFilter(FileFormat fileFormat) throws IOException {
+
+ assumeSupports(fileFormat, "filter");
+
+ DataGenerator dataGenerator = new DataGenerators.DefaultSchema();
+ Schema schema = dataGenerator.schema();
+
+ List<Record> genericRecords = dataGenerator.generateRecords();
+ writeGenericRecords(fileFormat, schema, genericRecords);
+
+ // Construct a filter condition that is smaller than the minimum value to
achieve file-level
+ // filtering.
+ Types.NestedField firstField = schema.columns().get(0);
+ Expression filter = filterFieldExpression(firstField, schema,
genericRecords);
+
+ InputFile inputFile = encryptedFile.encryptingOutputFile().toInputFile();
+ List<T> readRecords;
+ try (CloseableIterable<T> reader =
+ FormatModelRegistry.readBuilder(fileFormat, engineType(), inputFile)
+ .project(schema)
+ .engineProjection(engineSchema(schema))
+ .filter(filter)
+ .build()) {
+ readRecords = ImmutableList.copyOf(reader);
+ }
+
+ assertThat(readRecords).isEmpty();
Review Comment:
Could we generate enough records that we actually have some results, but not
all of them?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]