ramitg254 commented on code in PR #6089:
URL: https://github.com/apache/hive/pull/6089#discussion_r2583754720
##########
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreDirectSql.java:
##########
@@ -1977,116 +2020,96 @@ private List<ColumnStatisticsObj> aggrStatsUseDB(String catName, String dbName,
       // And, we also guarantee that the estimation makes sense by comparing it to the
       // UpperBound (calculated by "sum(\"NUM_DISTINCTS\")")
       // and LowerBound (calculated by "max(\"NUM_DISTINCTS\")")
- + "avg((\"LONG_HIGH_VALUE\"-\"LONG_LOW_VALUE\")/cast(\"NUM_DISTINCTS\"
as decimal)),"
- +
"avg((\"DOUBLE_HIGH_VALUE\"-\"DOUBLE_LOW_VALUE\")/\"NUM_DISTINCTS\"),"
- + "avg((cast(\"BIG_DECIMAL_HIGH_VALUE\" as
decimal)-cast(\"BIG_DECIMAL_LOW_VALUE\" as decimal))/\"NUM_DISTINCTS\"),"
- + "sum(\"NUM_DISTINCTS\")" + " from " + PART_COL_STATS + ""
- + " inner join " + PARTITIONS + " on " + PART_COL_STATS +
".\"PART_ID\" = " + PARTITIONS + ".\"PART_ID\""
- + " inner join " + TBLS + " on " + PARTITIONS + ".\"TBL_ID\" = " +
TBLS + ".\"TBL_ID\""
- + " inner join " + DBS + " on " + TBLS + ".\"DB_ID\" = " + DBS +
".\"DB_ID\""
- + " where " + DBS + ".\"CTLG_NAME\" = ? and " + DBS + ".\"NAME\" = ?
and " + TBLS + ".\"TBL_NAME\" = ? ";
+ + "sum((\"LONG_HIGH_VALUE\"-\"LONG_LOW_VALUE\")/cast(\"NUM_DISTINCTS\"
as decimal)),"
+ +
"count((\"LONG_HIGH_VALUE\"-\"LONG_LOW_VALUE\")/cast(\"NUM_DISTINCTS\" as
decimal)),"
+ +
"sum((\"DOUBLE_HIGH_VALUE\"-\"DOUBLE_LOW_VALUE\")/\"NUM_DISTINCTS\"),"
+ +
"count((\"DOUBLE_HIGH_VALUE\"-\"DOUBLE_LOW_VALUE\")/\"NUM_DISTINCTS\"),"
+ + "sum((cast(\"BIG_DECIMAL_HIGH_VALUE\" as
decimal)-cast(\"BIG_DECIMAL_LOW_VALUE\" as decimal))/\"NUM_DISTINCTS\"),"
+ + "count((cast(\"BIG_DECIMAL_HIGH_VALUE\" as
decimal)-cast(\"BIG_DECIMAL_LOW_VALUE\" as decimal))/\"NUM_DISTINCTS\"),"
+ + "sum(\"NUM_DISTINCTS\")" + " from " + PART_COL_STATS + "" + " inner
join " + PARTITIONS + " on "
+ + PART_COL_STATS + ".\"PART_ID\" = " + PARTITIONS + ".\"PART_ID\"" + "
inner join " + TBLS + " on " + PARTITIONS
+ + ".\"TBL_ID\" = " + TBLS + ".\"TBL_ID\"" + " inner join " + DBS + "
on " + TBLS + ".\"DB_ID\" = " + DBS
+ + ".\"DB_ID\"" + " where " + DBS + ".\"CTLG_NAME\" = ? and " + DBS +
".\"NAME\" = ? and " + TBLS
+ + ".\"TBL_NAME\" = ? ";
     String queryText = null;
-    long start = 0;
-    long end = 0;
     boolean doTrace = LOG.isDebugEnabled();
     ForwardQueryResult<?> fqr = null;
     // Check if the status of all the columns of all the partitions exists
     // Extrapolation is not needed.
     if (areAllPartsFound) {
-      queryText = commonPrefix + " and \"COLUMN_NAME\" in (" + makeParams(colNames.size()) + ")"
-          + " and " + PARTITIONS + ".\"PART_NAME\" in (" + makeParams(partNames.size()) + ")"
-          + " and \"ENGINE\" = ? "
-          + " group by \"COLUMN_NAME\", \"COLUMN_TYPE\"";
-      start = doTrace ? System.nanoTime() : 0;
-      try (QueryWrapper query = new QueryWrapper(pm.newQuery("javax.jdo.query.SQL", queryText))) {
-        Object qResult = executeWithArray(query.getInnerQuery(),
-            prepareParams(catName, dbName, tableName, partNames, colNames, engine), queryText);
-        if (qResult == null) {
-          return Collections.emptyList();
-        }
-        end = doTrace ? System.nanoTime() : 0;
-        MetastoreDirectSqlUtils.timingTrace(doTrace, queryText, start, end);
-        List<Object[]> list = MetastoreDirectSqlUtils.ensureList(qResult);
-        List<ColumnStatisticsObj> colStats =
-            new ArrayList<ColumnStatisticsObj>(list.size());
-        for (Object[] row : list) {
-          colStats.add(prepareCSObjWithAdjustedNDV(row, 0,
-              useDensityFunctionForNDVEstimation, ndvTuner));
+      queryText = commonPrefix + " and \"COLUMN_NAME\" in (%1$s)" + " and " + PARTITIONS + ".\"PART_NAME\" in (%2$s)"
+          + " and \"ENGINE\" = ? " + " group by \"COLUMN_NAME\", \"COLUMN_TYPE\"";
+      Batchable<String, Object[]> b = jobsBatching(queryText, catName, dbName, tableName, partNames, engine, doTrace);
+      List<ColumnStatisticsObj> colStats = new ArrayList<>(colNames.size());
+      try {
+        List<Object[]> list = Batchable.runBatched(batchSize, colNames, b);
+        Map<String, List<Object[]>> colSubList = columnWiseSubList(list);
+        for (Map.Entry<String, List<Object[]>> entry : colSubList.entrySet()) {
Review Comment:
I understand, but we still need a map to collect the duplicate results that different batches produce for the same column name, so that they can be merged. The other issue is that we cannot know upfront how many rows need to be merged for a particular column, because of the else branches for `areAllPartsFound`; otherwise we could have sorted the results and merged the entries found at a fixed interval. Currently `columnStatisticsObjWithAdjustedNDV` is called only once per column: we invoke it after all the batched results have been collected into the map, and it performs the merge per column.
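
For illustration, here is a minimal, self-contained sketch of that grouping-and-merge flow. The class, helper names (`groupByColumn`, `mergedAverage`), and the row layout are assumptions made for the example, not the actual `columnWiseSubList`/`columnStatisticsObjWithAdjustedNDV` code in this PR:

```java
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Minimal sketch (not the PR code): group the rows returned by several batches
// by column name, then merge each column's partial aggregates exactly once.
public class BatchedColumnStatsMergeSketch {

  // Hypothetical row shape for one partial aggregate produced by a batch:
  // [columnName, sumOfDensities, countOfDensities, sumNdv]
  static Map<String, List<Object[]>> groupByColumn(List<Object[]> batchedRows) {
    Map<String, List<Object[]>> byColumn = new LinkedHashMap<>();
    for (Object[] row : batchedRows) {
      String colName = (String) row[0];
      byColumn.computeIfAbsent(colName, k -> new ArrayList<>()).add(row);
    }
    return byColumn;
  }

  // Recover the average density across all batches of one column as
  // sum-of-sums / sum-of-counts; per-batch avg() values could not be
  // merged correctly, which is why the query selects sum() and count().
  static double mergedAverage(List<Object[]> partials) {
    double sum = 0;
    long count = 0;
    for (Object[] row : partials) {
      sum += ((Number) row[1]).doubleValue();
      count += ((Number) row[2]).longValue();
    }
    return count == 0 ? 0 : sum / count;
  }

  public static void main(String[] args) {
    List<Object[]> rows = new ArrayList<>();
    rows.add(new Object[] {"c1", 10.0, 2L, 100L});  // partial for "c1" from batch 1
    rows.add(new Object[] {"c1", 30.0, 3L, 150L});  // partial for "c1" from batch 2
    rows.add(new Object[] {"c2", 8.0, 4L, 40L});    // only one batch returned "c2"

    Map<String, List<Object[]>> byColumn = groupByColumn(rows);
    // The merge is performed once per column, after all batches have completed.
    byColumn.forEach((col, partials) ->
        System.out.println(col + " avg density = " + mergedAverage(partials)));
  }
}
```

The point of the sketch is that pre-aggregated sums and counts can be combined across batches, which matches the avg() to sum()/count() change in the query above.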