simhadri-g commented on code in PR #4431:
URL: https://github.com/apache/hive/pull/4431#discussion_r1245088566
##########
iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java:
##########
@@ -429,45 +434,50 @@ public boolean
setColStatistics(org.apache.hadoop.hive.ql.metadata.Table hmsTabl
ImmutableMap.of()));
writer.finish();
} catch (IOException e) {
- LOG.error(String.valueOf(e));
+ LOG.error("Unable to write stats to puffin file", e.getMessage());
}
return false;
}
@Override
public boolean
canProvideColStatistics(org.apache.hadoop.hive.ql.metadata.Table hmsTable) {
Table table = IcebergTableUtil.getTable(conf, hmsTable.getTTable());
- if (canSetColStatistics(hmsTable)) {
- Path statsPath = getStatsPath(table);
- try {
- FileSystem fs = statsPath.getFileSystem(conf);
- if (fs.exists(statsPath)) {
- return true;
- }
- } catch (IOException e) {
- LOG.warn("Exception when trying to find Iceberg column stats for
table:{} , snapshot:{} , " +
- "statsPath: {} , stack trace: {}", table.name(),
table.currentSnapshot(), statsPath, e);
+ return canSetColStatistics(hmsTable) && canProvideColStatistics(table,
table.currentSnapshot().snapshotId());
+ }
+
+ private boolean canProvideColStatistics(Table table, long snapshotId) {
+ Path statsPath = getStatsPath(table, snapshotId);
+ try {
+ FileSystem fs = statsPath.getFileSystem(conf);
+ if (fs.exists(statsPath)) {
+ return true;
}
+ } catch (IOException e) {
+ LOG.warn("Exception when trying to find Iceberg column stats for
table:{} , snapshot:{} , " +
+ "statsPath: {} , stack trace: {}", table.name(),
table.currentSnapshot(), statsPath, e);
}
return false;
}
@Override
public List<ColumnStatisticsObj>
getColStatistics(org.apache.hadoop.hive.ql.metadata.Table hmsTable) {
Table table = IcebergTableUtil.getTable(conf, hmsTable.getTTable());
- String statsPath = getStatsPath(table).toString();
+ Path statsPath = getStatsPath(table);
LOG.info("Using stats from puffin file at: {}", statsPath);
- try (PuffinReader reader =
Puffin.read(table.io().newInputFile(statsPath)).build()) {
+ return readColStats(table, statsPath).getStatsObj();
+ }
+
+ private ColumnStatistics readColStats(Table table, Path statsPath) {
+ try (PuffinReader reader =
Puffin.read(table.io().newInputFile(statsPath.toString())).build()) {
List<BlobMetadata> blobMetadata = reader.fileMetadata().blobs();
- Map<BlobMetadata, List<ColumnStatistics>> collect =
-
Streams.stream(reader.readAll(blobMetadata)).collect(Collectors.toMap(Pair::first,
- blobMetadataByteBufferPair -> SerializationUtils.deserialize(
-
ByteBuffers.toByteArray(blobMetadataByteBufferPair.second()))));
- return collect.get(blobMetadata.get(0)).get(0).getStatsObj();
+ Map<BlobMetadata, List<ColumnStatistics>> collect =
Streams.stream(reader.readAll(blobMetadata)).collect(
+ Collectors.toMap(Pair::first, blobMetadataByteBufferPair ->
SerializationUtils.deserialize(
+ ByteBuffers.toByteArray(blobMetadataByteBufferPair.second()))));
+ return collect.get(blobMetadata.get(0)).get(0);
} catch (IOException e) {
LOG.error("Error when trying to read iceberg col stats from puffin
files: ", e);
Review Comment:
Changed the log level from error to warn for this message.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]