bhasudha commented on a change in pull request #689: [HUDI-25] Optimize
HoodieInputFormat.listStatus for faster Hive Incremental queries
URL: https://github.com/apache/incubator-hudi/pull/689#discussion_r291482454
##########
File path:
hoodie-hadoop-mr/src/main/java/com/uber/hoodie/hadoop/HoodieInputFormat.java
##########
@@ -62,65 +64,57 @@
@Override
public FileStatus[] listStatus(JobConf job) throws IOException {
- // Get all the file status from FileInputFormat and then do the filter
- FileStatus[] fileStatuses = super.listStatus(job);
- Map<HoodieTableMetaClient, List<FileStatus>> groupedFileStatus =
groupFileStatus(fileStatuses);
- LOG.info("Found a total of " + groupedFileStatus.size() + " groups");
+ // Segregate inputPaths[] into incremental, non-incremental, and non-Hoodie paths
+ List<String> incrementalTables =
HoodieHiveUtil.getIncrementalTableNames(Job.getInstance(job));
+ InputPathHandler inputPathHandler = new InputPathHandler(conf,
getInputPaths(job),
+ incrementalTables);
List<FileStatus> returns = new ArrayList<>();
- for (Map.Entry<HoodieTableMetaClient, List<FileStatus>> entry :
groupedFileStatus.entrySet()) {
- HoodieTableMetaClient metadata = entry.getKey();
- if (metadata == null) {
- // Add all the paths which are not hoodie specific
- returns.addAll(entry.getValue());
+
+ Map<String, HoodieTableMetaClient> tableMetaClientMap = inputPathHandler
+ .getTableMetaClientMap();
+ // process incremental pulls first
+ for (String table : incrementalTables) {
+ HoodieTableMetaClient metaClient = tableMetaClientMap.get(table);
+ if (metaClient == null) {
+ // This can happen when the INCREMENTAL mode is set for a table but there are no InputPaths
+ // in the jobConf
continue;
}
+ List<Path> inputPaths =
inputPathHandler.getGroupedIncrementalPaths().get(metaClient);
+ List<FileStatus> result = listStatusForIncrementalMode(job, metaClient,
inputPaths);
+ if (result != null) {
+ returns.addAll(result);
+ }
+ }
- FileStatus[] statuses = entry.getValue().toArray(new
FileStatus[entry.getValue().size()]);
- if (LOG.isDebugEnabled()) {
- LOG.debug("Hoodie Metadata initialized with completed commit Ts as :"
+ metadata);
+ // process non-Hoodie paths next.
+ List<Path> nonHoodiePaths = inputPathHandler.getNonHoodieInputPaths();
+ if (nonHoodiePaths.size() > 0) {
+ setInputPaths(job, nonHoodiePaths.toArray(new
Path[nonHoodiePaths.size()]));
+ FileStatus[] fileStatuses = super.listStatus(job);
+ for (int i = 0; i < fileStatuses.length; i++) {
+ returns.add(fileStatuses[i]);
}
- String tableName = metadata.getTableConfig().getTableName();
- String mode = HoodieHiveUtil.readMode(Job.getInstance(job), tableName);
- // Get all commits, delta commits, compactions, as all of them produce a
base parquet file
- // today
- HoodieTimeline timeline =
metadata.getActiveTimeline().getCommitsTimeline()
- .filterCompletedInstants();
- TableFileSystemView.ReadOptimizedView roView = new
HoodieTableFileSystemView(metadata,
- timeline, statuses);
-
- if (HoodieHiveUtil.INCREMENTAL_SCAN_MODE.equals(mode)) {
- // this is of the form commitTs_partition_sequenceNumber
- String lastIncrementalTs = HoodieHiveUtil
- .readStartCommitTime(Job.getInstance(job), tableName);
- // Total number of commits to return in this batch. Set this to -1 to
get all the commits.
- Integer maxCommits =
HoodieHiveUtil.readMaxCommits(Job.getInstance(job), tableName);
- LOG.info("Last Incremental timestamp was set as " + lastIncrementalTs);
- List<String> commitsToReturn =
timeline.findInstantsAfter(lastIncrementalTs, maxCommits)
-
.getInstants().map(HoodieInstant::getTimestamp).collect(Collectors.toList());
- List<HoodieDataFile> filteredFiles =
roView.getLatestDataFilesInRange(commitsToReturn)
- .collect(Collectors.toList());
- for (HoodieDataFile filteredFile : filteredFiles) {
- LOG.info("Processing incremental hoodie file - " +
filteredFile.getPath());
- filteredFile = checkFileStatus(filteredFile);
- returns.add(filteredFile.getFileStatus());
- }
- LOG.info("Total paths to process after hoodie incremental filter " +
filteredFiles.size());
- } else {
- // filter files on the latest commit found
- List<HoodieDataFile> filteredFiles = roView.getLatestDataFiles()
- .collect(Collectors.toList());
- LOG.info("Total paths to process after hoodie filter " +
filteredFiles.size());
- for (HoodieDataFile filteredFile : filteredFiles) {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Processing latest hoodie file - " +
filteredFile.getPath());
- }
- filteredFile = checkFileStatus(filteredFile);
- returns.add(filteredFile.getFileStatus());
+ }
+
+ // process non-incremental queries next.
Review comment:
yes.
----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
For queries about this service, please contact Infrastructure at:
[email protected]
With regards,
Apache Git Services